From 9567b9dd1623743c8d033bd7187b9127ffe9b426 Mon Sep 17 00:00:00 2001 From: Jose Storopoli Date: Fri, 15 Dec 2023 10:16:31 -0300 Subject: [PATCH] feat: Sparse Regression (#59) * fix(flake): some latexmk tweaks * feat(sparse): add references * fix(sparse): added content before hierarchical models * feat(sparse): content sparse regression * feat(sparse): add data from glmnet * feat(turing): sparse regression * ci: turing sparse models * ci: stan models rearranging * feat(stan): sparse regression * ci(stan): print output * ci(stan): run 4 chains and print summary * ci(turing): print summary * feat(turing): fix sparse horseshoe and horseshoe_p * feat(stan): sparse horseshoe * feat(turing): model comparison show output * feat(stan): sparse horseshoe + * feat(stan): sparse finnish horseshoe * feat(stan): sparse r2d2 --- .github/workflows/models.yml | 45 +- README.md | 8 + datasets/sparse_regression.csv | 101 + flake.nix | 8 +- slides/.gitignore | 3 +- slides/.latexmkrc | 4 +- slides/10-Sparse_Regression.tex | 379 +++ ..._Models.tex => 11-Hierarchical_Models.tex} | 0 slides/{11-MCMC.tex => 12-MCMC.tex} | 0 ...Comparison.tex => 13-Model_Comparison.tex} | 0 slides/flake.lock | 186 -- slides/flake.nix | 61 - slides/references.bib | 79 + slides/slides.tex | 12 +- slides/tikz/{placeholder => .gitkeep} | 0 stan/11-sparse_horseshoe_regression.stan | 84 + stan/12-sparse_horseshoe_p_regression.stan | 85 + ...3-sparse_finnish_horseshoe_regression.stan | 131 + stan/14-sparse_r2d2_regression.stan | 105 + ...hical_varying_intercept-non_centered.stan} | 0 ...=> 15-hierarchical_varying_intercept.stan} | 0 ...hierarchical_varying_intercept_slope.stan} | 0 ...7-model_comparison-negative_binomial.stan} | 0 ....stan => 17-model_comparison-poisson.stan} | 0 ...ison-zero_inflated-negative_binomial.stan} | 0 ...del_comparison-zero_inflated-poisson.stan} | 0 stan/sparse_regression.data.json | 2308 +++++++++++++++++ turing/01-predictive_checks.jl | 5 +- turing/02-linear_regression-kidiq.jl | 2 
+ turing/03-logistic_regression-wells.jl | 1 + turing/04-ordinal_regression-esoph.jl | 1 + turing/05-poisson_regression-roaches.jl | 1 + turing/06-robust_linear_regression-duncan.jl | 1 + ...7-robust_beta_binomial_regression-wells.jl | 1 + turing/08-robust_robit_regression-wells.jl | 1 + ...st_negative_binomial_regression-roaches.jl | 1 + ...ed_regression-negative_binomial-roaches.jl | 1 + ...ero_inflated_regression-poisson-roaches.jl | 1 + turing/11-sparse_horseshoe.jl | 91 + turing/12-sparse_horseshoe_p.jl | 93 + turing/13-sparse_finnish_horseshoe.jl | 105 + turing/14-sparse_r2d2.jl | 101 + ...-hierarchical_varying_intercept-cheese.jl} | 1 + ..._varying_intercept-non_centered-cheese.jl} | 1 + ...rchical_varying_intercept_slope-cheese.jl} | 1 + ...ches.jl => 17-model_comparison-roaches.jl} | 4 +- 46 files changed, 3744 insertions(+), 268 deletions(-) create mode 100644 datasets/sparse_regression.csv create mode 100644 slides/10-Sparse_Regression.tex rename slides/{10-Hierarchical_Models.tex => 11-Hierarchical_Models.tex} (100%) rename slides/{11-MCMC.tex => 12-MCMC.tex} (100%) rename slides/{12-Model_Comparison.tex => 13-Model_Comparison.tex} (100%) delete mode 100644 slides/flake.lock delete mode 100644 slides/flake.nix rename slides/tikz/{placeholder => .gitkeep} (100%) create mode 100644 stan/11-sparse_horseshoe_regression.stan create mode 100644 stan/12-sparse_horseshoe_p_regression.stan create mode 100644 stan/13-sparse_finnish_horseshoe_regression.stan create mode 100644 stan/14-sparse_r2d2_regression.stan rename stan/{11-hierarchical_varying_intercept-non_centered.stan => 15-hierarchical_varying_intercept-non_centered.stan} (100%) rename stan/{11-hierarchical_varying_intercept.stan => 15-hierarchical_varying_intercept.stan} (100%) rename stan/{12-hierarchical_varying_intercept_slope.stan => 16-hierarchical_varying_intercept_slope.stan} (100%) rename stan/{13-model_comparison-negative_binomial.stan => 17-model_comparison-negative_binomial.stan} (100%) rename 
stan/{13-model_comparison-poisson.stan => 17-model_comparison-poisson.stan} (100%) rename stan/{13-model_comparison-zero_inflated-negative_binomial.stan => 17-model_comparison-zero_inflated-negative_binomial.stan} (100%) rename stan/{13-model_comparison-zero_inflated-poisson.stan => 17-model_comparison-zero_inflated-poisson.stan} (100%) create mode 100644 stan/sparse_regression.data.json create mode 100644 turing/11-sparse_horseshoe.jl create mode 100644 turing/12-sparse_horseshoe_p.jl create mode 100644 turing/13-sparse_finnish_horseshoe.jl create mode 100644 turing/14-sparse_r2d2.jl rename turing/{11-hierarchical_varying_intercept-cheese.jl => 15-hierarchical_varying_intercept-cheese.jl} (98%) rename turing/{11-hierarchical_varying_intercept-non_centered-cheese.jl => 15-hierarchical_varying_intercept-non_centered-cheese.jl} (98%) rename turing/{12-hierarchical_varying_intercept_slope-cheese.jl => 16-hierarchical_varying_intercept_slope-cheese.jl} (99%) rename turing/{13-model_comparison-roaches.jl => 17-model_comparison-roaches.jl} (99%) diff --git a/.github/workflows/models.yml b/.github/workflows/models.yml index 46f9c61..8c441f9 100644 --- a/.github/workflows/models.yml +++ b/.github/workflows/models.yml @@ -29,10 +29,14 @@ jobs: "09-robust_negative_binomial_regression-roaches.jl", "10-robust_zero_inflated_regression-negative_binomial-roaches.jl", "10-robust_zero_inflated_regression-poisson-roaches.jl", - "11-hierarchical_varying_intercept-cheese.jl", - "11-hierarchical_varying_intercept-non_centered-cheese.jl", - "12-hierarchical_varying_intercept_slope-cheese.jl", - "13-model_comparison-roaches.jl", + "11-sparse_horseshoe.jl", + "12-sparse_horseshoe_p.jl", + "13-sparse_finnish_horseshoe.jl", + "14-sparse_r2d2.jl", + "15-hierarchical_varying_intercept-cheese.jl", + "15-hierarchical_varying_intercept-non_centered-cheese.jl", + "16-hierarchical_varying_intercept_slope-cheese.jl", + "17-model_comparison-roaches.jl", ] steps: - name: Checkout repository @@ 
-116,28 +120,44 @@ jobs: data: "roaches.data.json", }, { - model: "11-hierarchical_varying_intercept-non_centered", + model: "11-sparse_horseshoe_regression", + data: "sparse_regression.data.json", + }, + { + model: "12-sparse_horseshoe_p_regression", + data: "sparse_regression.data.json", + }, + { + model: "13-sparse_finnish_horseshoe_regression", + data: "sparse_regression.data.json", + }, + { + model: "14-sparse_r2d2_regression", + data: "sparse_regression.data.json", + }, + { + model: "15-hierarchical_varying_intercept-non_centered", data: "cheese.data.json", }, { - model: "11-hierarchical_varying_intercept", + model: "15-hierarchical_varying_intercept", data: "cheese.data.json", }, { - model: "12-hierarchical_varying_intercept_slope", + model: "16-hierarchical_varying_intercept_slope", data: "cheese.data.json", }, { - model: "13-model_comparison-negative_binomial", + model: "17-model_comparison-negative_binomial", data: "roaches.data.json", }, - { model: "13-model_comparison-poisson", data: "roaches.data.json" }, + { model: "17-model_comparison-poisson", data: "roaches.data.json" }, { - model: "13-model_comparison-zero_inflated-negative_binomial", + model: "17-model_comparison-zero_inflated-negative_binomial", data: "roaches.data.json", }, { - model: "13-model_comparison-zero_inflated-poisson", + model: "17-model_comparison-zero_inflated-poisson", data: "roaches.data.json", }, ] @@ -155,4 +175,5 @@ jobs: run: | echo "Compiling: ${{ matrix.stan.model }}" nix develop -L . --command bash -c "stan stan/${{ matrix.stan.model }}" - nix develop -L . --command bash -c "stan/${{ matrix.stan.model }} sample data file=stan/${{ matrix.stan.data }}" + nix develop -L . --command bash -c "stan/${{ matrix.stan.model }} sample num_chains=4 data file=stan/${{ matrix.stan.data }}" + nix develop -L . 
--command bash -c "stansummary output_*.csv" diff --git a/README.md b/README.md index 09f3cba..8141e00 100644 --- a/README.md +++ b/README.md @@ -37,6 +37,7 @@ Here is a brief table of contents: 1. **Bayesian Ordinal Regression** 1. **Bayesian Regression with Count Data: Poisson Regression** 1. **Robust Bayesian Regression** +1. **Bayesian Sparse Regression** 1. **Hierarchical Models** 1. **Markov Chain Monte Carlo (MCMC) and Model Metrics** 1. **Model Comparison: Cross-Validation and Other Metrics** @@ -86,6 +87,8 @@ These were tested with `Turing.jl` version 0.21.9 and Julia 1.7.3. Source: Gelman and Hill (2007). - `duncan` (robust regression): data from occupation's prestige filled with outliers. Source: Duncan (1961). +- `sparse_regression` (sparse regression): simulated data from the [`glmnet` R package](https://cran.r-project.org/package=glmnet). + Source: Tay, Narasimhan and Hastie (2023). - `cheese` (hierarchical models): data from cheese ratings. A group of 10 rural and 10 urban raters rated 4 types of different cheeses (A, B, C and D) in two samples. Source: Boatwright, McCulloch and Rossi (1999). @@ -174,6 +177,10 @@ The papers section of the references are divided into **required** and **complem - Amrhein, V., Greenland, S., & McShane, B. (2019). Scientists rise up against statistical significance. _Nature_, _567_(7748), 305–307. https://doi.org/[10.1038/d41586-019-00857-9](https://doi.org/10.1038/d41586-019-00857-9) +- Piironen, J. & Vehtari, A. (2017). Sparsity information and regularization in the + horseshoe and other shrinkage priors. + _Electronic Journal of Statistics_. _11_(2), 5018-5051. + https://doi.org/10.1214/17-EJS1337SI - van Ravenzwaaij, D., Cassey, P., & Brown, S. D. (2018). A simple introduction to Markov Chain Monte–Carlo sampling. _Psychonomic Bulletin and Review_, _25_(1), 143–154. @@ -270,6 +277,7 @@ The papers section of the references are divided into **required** and **complem - Boatwright, P., McCulloch, R., & Rossi, P. (1999).
Account-level modeling for trade promotion: An application of a constrained parameter hierarchical model. _Journal of the American Statistical Association_, 94(448), 1063–1073. - Breslow, N. E. & Day, N. E. (1980). **Statistical Methods in Cancer Research. Volume 1: The Analysis of Case-Control Studies**. IARC Lyon / Oxford University Press. - Duncan, O. D. (1961). A socioeconomic index for all occupations. Class: Critical Concepts, 1, 388–426. +- Tay JK, Narasimhan B, Hastie T (2023). Elastic Net Regularization Paths for All Generalized Linear Models. _Journal of Statistical Software_, 106(1), 1–31. doi:10.18637/jss.v106.i01. - Gelman, A., & Hill, J. (2007). **Data analysis using regression and multilevel/hierarchical models**. Cambridge university press. diff --git a/datasets/sparse_regression.csv b/datasets/sparse_regression.csv new file mode 100644 index 0000000..05e05f7 --- /dev/null +++ b/datasets/sparse_regression.csv @@ -0,0 +1,101 @@ +"x1","x2","x3","x4","x5","x6","x7","x8","x9","x10","x11","x12","x13","x14","x15","x16","x17","x18","x19","x20","y" +0.273856207444503,-0.036672201779096,0.854726937504524,0.967524214701756,1.41548975273268,0.523405872097659,0.56268817936726,1.11122332712396,1.64082139108541,0.618706707445803,0.999934827419799,-0.0784191603765545,-0.603326103942687,0.0332316788370116,-0.700884530699312,1.15783793088888,1.45781559335195,0.774906992171236,-1.26851773908902,1.99358000863254,-1.27488603195869 +2.24481689268639,-0.546030016142993,0.23406506726493,-1.33503042666267,1.3130758167705,0.521274584820675,-0.610034625097462,-0.861396505196466,-0.27046346058683,0.230082533746431,-0.105708978645508,0.163141221429295,0.762076611235272,0.678120034658062,-0.528266982696816,-0.87915477122761,-0.472913401951692,-1.11717308722471,-0.737732128702542,-1.07879293256602,1.84342510323916 
+-0.125423034417563,-0.606878201926287,-0.853921693817499,-0.148777202613475,-0.66468279011433,0.606616412277758,0.161720650046395,-0.862721652491622,0.604210153471434,1.19397681336983,0.501250938134614,-0.945205919809937,0.398902631211981,-0.764785046610817,1.28540187145122,0.644876709820694,0.179245528241142,0.0447375641475657,1.10530710799217,0.304054516129315,0.45923631556268 +-0.54357344128812,1.10835827274681,-0.104247994046094,1.01652622766499,0.69990417711699,1.65501641973393,0.489963464198278,0.0233820879641371,0.256030370191788,-0.127313964536315,-0.0626284627126575,0.641954680800406,0.0754819840132362,-1.37846439752932,-1.0247390457468,-2.1183615202707,-0.469534506028022,0.697796161566865,0.865636177131047,-0.789489511219237,0.564040738165436 +-1.45939839398061,-0.274494522530397,0.111905961611328,-0.851787699888158,0.315283867023526,1.05074928232857,1.38635752951112,0.284501037678726,1.14049759930895,2.68134597466573,-1.01473386489479,0.367043721804833,1.73759744520768,-1.2661270561515,1.45199945278547,-0.789475641702451,-0.984385965703637,-1.63700992715094,-0.582916804830087,-1.52891036229226,1.87296325806198 +1.06320807488699,-0.753523174711692,-1.38255341242837,1.07622698480168,0.370033104002747,1.49872123762419,-0.360452532125179,-0.214215705717881,1.82664830772735,0.871122474719113,-0.0637292778807991,1.00507109933515,0.315207863881172,0.536718457161079,-1.26242272222612,-1.58486615366365,-0.631499126341178,-1.87874350371747,0.450428698039733,1.44226426241522,0.527531732089232 +0.115844992014276,-0.966302384188801,0.274179179651271,0.0185310251454063,-0.210387227929533,0.544089446635639,-2.50548936681868,2.19626902858519,0.59252698023899,-1.08759703323555,-1.46460447546525,0.57271842485202,-0.62113071729262,-0.644862919259716,-1.35047314510719,0.383421591003648,1.2629832215076,1.56076929999962,-0.963394862349788,-0.245102026158523,2.43465886785111 
+0.389706727267654,0.399794284498825,-0.466669818239119,0.477628120062795,0.848291294661942,0.257435292454431,-0.099494846101807,0.999014244374147,-0.192535596235614,-0.404191348511082,2.20740008505155,-1.05877944937819,0.0957255874207629,0.661150265730257,-0.124705277787698,-0.436175599131596,1.55933293159113,0.705100585843505,-0.857781611654114,-0.732820633566933,-0.8945961209746 +0.193157257863123,0.197234258722836,-1.24311803569415,1.90936069679463,0.114854591593851,0.451589685588059,-0.267140446449913,-0.273467792450906,-1.6111022650495,-0.32555752842962,-0.678700831487926,-0.479052036959549,0.40779123006399,0.276409257451158,0.034061862169581,0.686192441983629,0.529493421356585,0.876963374247079,-0.508150612804797,-0.835163783017199,-0.205938391661497 +-0.0964255498023261,1.17715872690184,2.55154766265603,0.955689515208055,0.0255721083737314,-0.0456028254801544,-0.659452626751548,0.0391878771285303,0.171683893334249,0.643388199251565,-1.23755302893156,2.18024599853618,-0.526014097862977,-0.318712237026754,-0.725155287506575,-1.1938698768664,0.168373276826034,0.486037875848119,2.59468103483088,-0.847198859202136,3.11011884885933 +-0.94198342944249,-0.652854315411587,-0.938181620632853,0.406895726799665,-0.829658343140349,-0.0616750001686973,1.3659787194056,2.65359620915275,1.00206809886296,1.04133811591443,-0.387386434494066,-0.411431773780334,1.46436406864783,2.17521869174458,-0.484862124846808,1.9288941157332,0.00154261984796149,-1.46628242155071,-1.11529209898974,0.587006509453683,-4.17985553158222 +-0.0762892687624095,1.07242691640742,1.45853039053057,-0.63930611070872,0.813628054430233,-0.74447546618081,1.17654013552298,-0.0551003162437021,0.189869320725467,0.0651899003266117,0.0706319828284981,-0.22732351765365,-0.135802199079483,-0.367341579026999,-1.96110516771586,-0.484261882282337,0.29738787493299,-0.350304578013323,-0.332361341709466,-0.173004417119481,1.70014573437721 
+0.809649196970223,-0.0183513849105259,-0.654414225516508,-1.49043958405195,-0.828158872306366,-1.12278363555522,0.146559043603529,-0.399052152289976,-1.4014517846518,-1.12578886572904,-0.407665191797062,-0.969991443913499,-1.9904698468209,-2.46219146955525,0.45923910705422,-0.309528549864511,-1.20496319094908,-0.418779123850595,-3.06435498772679,-0.149619324384578,2.71341896402807 +0.74727900895513,-0.524884045293033,1.4236374453838,-1.31833821364663,0.214661123638125,0.912590709223148,-0.412410876334964,-0.719830058872636,-0.802276602091343,-0.940459048601605,-0.594541016286786,0.761264221022891,-0.0196171636980003,0.396673475033981,-0.599535260651564,0.237284715273538,0.889581131808279,0.0117061503368527,-0.0441699470608208,0.0709817046476715,3.04202767290004 +-1.41670129672869,-0.763140127105275,0.868967130340468,-0.778902203685902,-1.276504958018,-0.412160453338109,0.393163635643849,1.03121939718823,2.021869155226,0.315800136942583,-1.30537750548905,1.38433128182148,0.0286660138040508,-0.640484989696767,-0.74582716338694,-1.3118793398552,1.85255094540662,0.266773497476441,-0.519226413980162,-0.285342840010378,0.522335380794531 +0.916808322011868,1.45581817379753,0.617047804912848,0.924846395461989,-0.826752588390756,1.5467850026826,1.04577603508772,-1.74054997456812,-0.971918210592847,-0.113988219792383,0.726677536657964,-0.919696566224955,0.336602030179408,0.7455960165479,0.269564172975194,-0.648792164570706,1.67902602370456,-1.41760369842147,-2.23509709932249,-0.601989371122817,3.58310767942288 +-0.75413202558559,-0.824825283511846,-0.653447161271663,-0.296743765820434,1.18255290909483,-0.135126117790195,1.03248103903117,0.407597010735734,-0.155701053277342,-2.2789493599089,-0.850043063588108,-0.0132101469930385,-0.505180471551627,1.77640467013671,-1.01535762478836,0.0145984950910476,-0.647835647752632,0.890253806882392,0.62180524563031,-0.0144463917376286,-4.17317882541808 
+1.00901629859336,-0.105043257752352,-0.575589475276811,-0.747102267584892,-0.359610370028572,0.149024634936467,0.880064380714194,-2.19396667938693,0.744740762167459,-0.370674807095278,-1.36283254883802,1.63470284604422,0.798843585125753,-0.443415644945696,-0.540798258454989,-0.363145422812655,2.4228255168068,-2.38361741577941,-0.0282265646178997,1.20535583440782,-1.68427179854291 +1.86171747368031,-2.07578220184628,-0.199113156501026,0.670275976548367,-1.0347944873714,-1.33552382370267,0.822158138455839,-0.538599656868847,-0.196361041319276,1.25375594154167,0.307783271774494,-0.616957825219647,0.222203850732545,-1.09557267802619,-0.392224376864539,-0.327538520566922,0.219984372714833,0.710120811403728,1.2971718889023,-1.48708286087579,5.3033149973319 +-0.696004167282951,0.845570099029707,-1.00248565255882,0.529101361465526,-0.793522659500271,0.182698594590871,-0.587341007671628,-0.30906758192141,-0.409773503664693,-0.440145949098583,-0.351780911194588,-1.67218444912115,0.679844559734395,0.268922729336062,1.68112718842607,0.0553175972258811,2.56317322568398,0.292812835514623,1.20665391882855,-0.94886029156411,-1.26400794502879 +0.505507693860946,-1.79153719515451,-0.137137477441306,-0.24604818252069,-1.84720781062925,-0.288922868947442,-0.976153854925228,-1.09582782497859,-1.42702017976171,-0.955619103641084,0.226742714622358,-0.907004206793677,0.371637760825847,-0.212575634678858,-1.26523296851482,1.26317285776171,-0.360055058354822,-0.114572238673794,0.351137482073202,0.653633858891453,1.73517012878565 +-1.17492688368643,0.535664506486872,0.574848495173427,0.5683659124047,0.663768938970929,-0.0599903424228401,1.29057659183526,1.55592528753323,-0.576731043523645,-0.118087321519128,-0.251816052576465,1.02651726516624,-0.180867070477805,0.08526727309407,-0.76054238414435,-0.145217251447255,0.0608061284329148,0.302684908545763,0.0585892944886532,1.15170890479721,-3.48075844161381 
+0.246914608356666,-0.581433067365614,1.65696172031376,1.32081399396151,-0.669467337668756,0.921247244115548,-0.340300377641215,-0.0190926346415671,-0.190962634251491,-1.37410341728568,-0.241289329130791,0.461867552196824,-0.386515001091379,-1.08332302551013,-0.461315199097382,0.831410996878713,1.29599142290154,-0.18344816476211,1.30600893199783,-0.755858586555573,6.26592288459783 +-1.89613011965395,0.487989759356288,1.58030992397205,-0.920222763569841,0.793179398514798,-1.30318517993267,0.619063328051595,0.975734741095818,-0.567911927160161,0.163979524902316,-0.0396600769385063,1.69352533615939,-2.31038404021135,0.633180953020973,-1.86791468686005,1.01773807104243,0.409267390070511,-2.42607899234253,-2.16038123284379,-0.138161335797987,-2.56676475884914 +0.0598650394497882,-0.288702145932067,0.402694339289576,0.267935174868245,1.35770998349287,-1.85162198253089,0.494298954698597,1.9372329526521,-1.35626629123254,0.571253664489988,-1.70199599924993,1.06985414029911,0.154614245050699,0.208385484775836,-0.169881527989001,-0.445263687117716,-1.23070433902032,0.0363276526586893,-0.347222074659487,-0.655980312444002,-0.912013830684209 +-1.49978500055228,0.948015649163439,-0.634743861330266,-1.21066072670648,0.504179440347165,-1.09960254951441,-0.0615886268530476,-0.220663618471907,-1.17008807799549,-1.68708290343739,0.979778419267449,-1.36662674325626,-0.923011138701667,-0.795909892967774,1.06284549910001,1.5414802985684,0.0959226134105741,-0.344452104543643,-1.10234451976164,-0.803214410843314,-0.751006785729847 +-0.352830465459373,-0.236603765551558,-1.38707392645021,0.11017123742133,-1.39393867175447,-1.51285186469936,-0.16245884255875,-2.16461992214033,-1.53765310812874,-1.19549179140719,1.10455039863447,1.27378363568306,-0.41600006714668,1.35332753791729,-1.16069658007135,-0.661214861376634,1.9528922079674,-0.779896654846974,1.18775765272432,0.314884889403281,-3.66883533372402 
+-1.81849993188271,-0.761275310980207,-0.748429075712014,0.0559783891483592,-0.0562961244937134,0.142574641342857,-1.30796775532409,-0.248311532585943,0.331193517835215,1.83078505255457,-2.23932784158433,0.418891103838752,0.337986211864686,0.96583840033502,-0.00270665690496256,-1.24261678430722,-0.0278642123831258,-1.02653230770817,-0.0500239812054335,-0.682894819556998,-4.0007998788045 +3.16192125529609,-1.23793956935979,-0.702360908377316,0.0780086167952578,1.62457786312747,0.0907288653840403,-0.227455938190754,0.628022339296555,-1.18912336214418,-0.177423059874184,-0.398573290920884,0.392365793435501,0.114552092571297,1.45105072132875,1.4385833387418,-0.157121869159803,-0.437132437806752,0.998663437111232,1.25534924287975,-1.183611688055,1.44023624442189 +-0.455686430684038,-0.954399777379345,-2.5795862629687,-0.544679869570775,0.524462281213807,-0.894718114132061,-0.118764209789544,-0.752944493724349,0.461636960328607,0.207928767757689,-0.571187620935843,-1.97760127779171,0.216224939327685,0.362689917382242,-0.635098425965797,-0.588846708829979,-1.88520115023863,0.457889890807513,0.366199660740175,-1.00556154794052,-2.06202897560819 +-1.83403638738453,0.881677979663288,-0.825593602251031,2.29037170495062,-0.503816355487744,-1.07311550708707,-0.139197861577612,0.691497664878531,-0.708489779734334,-0.130109640600388,-0.311724158384063,0.762186503719265,-0.735080276187296,0.0630741111453173,-1.05127383428534,-0.0832692351430601,0.587663353359865,-0.9319375573439,0.240855240092352,0.505316148394979,-2.89308765136922 +0.849468646709612,0.444354065994558,-0.482678748121925,0.488278506709503,-0.399540648664754,0.918521261891089,-0.860391278836107,1.04701873502233,-0.233027518778341,-0.843584279103145,-1.44847762939855,-0.313367207907317,0.375116282332498,-1.02482848781651,-1.35779895863254,0.925323641542699,0.348298885245574,0.18090742984702,0.591244057186281,-0.90488369356436,3.12567428240168 
+1.25651371023008,1.13837840059819,0.276767739044391,-0.446497214020744,0.630168706583515,-0.311094143856995,1.09221304274443,-1.06521401437804,-1.16410024094345,-0.0108893815491961,-0.417278771979697,1.48763211974277,-0.705850022730995,-1.72202980276973,0.271737406210524,0.0238910040365908,0.676471466553939,0.337567547053213,0.961120573087267,-1.34497966055623,5.53217677193772 +0.533238269828199,-1.1358514188425,-1.63099892775179,0.116352164814184,-0.17607113403718,-2.01879725263554,-0.716756032048972,-0.948408789284849,-2.1052374762841,0.285110835570463,0.688858319894544,-1.0498547721925,-1.64191688436921,-0.849596674109152,1.3441236957416,1.1212920043376,-1.43032353832581,1.0271699926893,1.24170361490502,0.565424911856043,-1.7407953012728 +1.27412848005901,-1.28158198392576,0.604092203980074,-1.0552532507639,0.913649683753289,2.27527279317425,0.440406093279366,-1.43168249171376,-0.166212286416072,0.186809192078086,-0.0905778596719629,-0.338386484294644,0.384098541969358,-0.622815274114306,-0.481909771779173,-0.557453208223386,-0.396075526046964,-2.31416857425114,0.933040420074124,-0.564253794549603,5.45280503066267 +-0.978256159051382,-0.924219096124441,0.03909339216709,0.995250488431529,-0.854847860595658,2.07916283549,2.08064730982222,0.110960127206109,-0.734449072686714,0.446253523748507,-0.975619748941575,0.158095759051923,-1.17423001619018,-0.613890177292518,-0.388434068259511,-0.775961274417843,-0.0104247310510443,-0.301829783482774,0.0213501362766561,0.25505431695065,1.84751668115537 +1.72830504241787,-2.87889524325048,1.01403646824589,-0.219108002902573,1.0335720260315,-1.23460536347887,2.18573416482445,-1.2130515872333,2.41487199173335,0.98650062753357,0.667056783858573,0.833565200614898,-0.65492969704128,0.986055626728173,0.696617585890058,1.67019698413158,-0.263093467903494,-0.770948059314962,2.19007622440246,0.0544314041470725,-1.27558298485892 
+-0.0260090209582293,0.721130869869406,-1.62043176446499,3.11291183724182,0.385366669493457,-0.108039201004765,0.0866782679895744,0.326199910983168,0.828464658481415,0.953902738206196,0.465018377418759,0.391227763016679,-0.156942940866075,-0.207214274897331,1.40292301855232,1.14302593810642,-0.53559024401363,-0.153735562943926,-0.259660060377528,0.298459698602885,-1.06731436359299 +-0.293206190587227,1.1113797752171,0.955357406304192,2.47311382359433,0.284361506769806,-0.585198609166986,-0.656312531248397,-1.59081169250801,2.16269049261067,-0.0178433310781814,1.30287621341081,-0.130591007385621,-1.14856036546672,1.75718527688754,-1.76079701337016,-0.755305441907176,-0.195069727111589,0.230655979793704,0.801177080673191,0.131105523501017,-1.73760513546829 +0.321864887310657,0.493032088738059,0.433550316971498,-0.916615418003088,-0.737710471854404,-0.642234929013317,-0.703234420934156,2.17497392642832,-0.413132176520477,0.634480716302367,1.9194337138206,-1.38115002509953,0.379210639703391,-0.122700786354579,-0.455727633615022,0.657642552643451,0.269681229765261,-0.417251674595406,0.28626591562328,0.106351519622281,2.89056286364724 +0.589601385062281,1.91048816552635,0.63885139869166,-0.00861332704823776,-0.939021705554624,-0.198581002901274,-1.60618419887544,-1.02790915270392,-0.066441550849879,0.918364758972357,0.977246566081768,-1.43214859191924,-0.131005161417586,-1.13926922375261,0.556133326046465,1.23679268372689,-1.69418590703725,0.454103797740677,0.236281713826326,-0.307431273587189,5.01498857928119 +1.25533052340145,0.342701451174279,0.808550058530894,-1.66606337424371,1.75293812269621,1.02303544187943,-0.181561109708889,0.205506014316109,1.04205130772828,1.44398439520967,-1.67543752146724,-1.36265770379703,-0.276585298182517,-0.500199969386733,0.909051737951046,0.969135725064105,-1.04852561273634,-1.41831107606649,0.432024938107326,-1.27340701648458,4.44293906975649 
+-0.988812202777334,0.0040270111073786,1.46539479599065,-0.0091018124480025,-1.02387382148599,-1.68145673369001,1.97923220479701,-0.132036608182369,0.813280850812479,-0.867476886774158,-1.17703800416678,0.0518513832274923,0.536424513704611,1.34632920721149,-0.246644024463852,-1.21466195998806,-0.879739286662119,-0.415648886097474,-0.395223942580523,-0.254556730676825,-1.68041773622904 +-0.311745928547791,1.7072111187115,-0.0755266396493468,-0.660726847284053,-0.886117974637542,-1.04314134701496,0.420222847224799,0.407762063745821,0.151455925048234,0.562387840221166,-0.191324227900154,-0.115653649062738,1.02340070431204,-1.28395069684613,0.745032464665668,-0.879872225333195,-0.715749330182496,-1.07936917265281,0.431390177894896,-0.0991685832762514,2.4295159218935 +1.40197010967618,-2.40827797727488,0.454523098141284,1.2019545300898,0.465990217155428,0.898587249581772,0.387253353610746,-1.19535089117827,-0.729542845150667,-0.904352172455497,-0.526397272962171,-1.50802498656234,-0.650207644545078,-0.0291759688380332,-0.413490403973007,0.495071133564036,0.675974321356044,1.12588759428088,-0.732803103878809,-0.57703427159543,3.38612723427831 +1.22284913088032,0.666164032696407,0.900271345333632,0.965334607511614,-2.68543062857132,-0.118444666354878,1.5777428299956,0.355008344910167,-0.111257112240381,1.36888279670145,-0.600798180874183,1.05289489887281,-1.04505166442953,-0.1182619349848,0.211602510739406,1.41464015797884,-0.124022150505464,0.768727099452319,1.12435219817314,0.18976484375065,4.70594554439949 +-1.24153959889039,1.71000943432548,1.68485305832054,-1.03572697884434,1.57331224571118,0.649935501275439,-1.5619317715992,-0.317674923919809,0.0206888320747084,1.64921140418044,-0.413821906423743,0.624863242442626,0.865322533480648,0.245626080903828,-0.272146344860666,2.13515348931695,-0.556962397535267,-0.843245885565188,0.179347722378022,-0.152786985645033,-2.81310139695326 
+1.36366767621065,0.0283119164231682,1.46187793717392,2.61417411176738,-0.799400209869018,0.425915181356144,0.152598185884214,-0.881765622117174,0.859245655678883,0.572838999806655,0.626843090039838,-0.971549505416084,-1.36495881093721,-0.331546103489994,-0.399403750283326,0.93859342351282,0.610697188483085,-2.47142929733057,-0.289543299576137,-0.741584505493347,5.24274318040352 +-0.455773435801759,-1.13756191200553,-0.445400240296447,0.943563570041481,-0.180880802750629,-1.29206481389473,-1.04213028043921,-0.969754320395442,0.639483795386125,1.00049762524589,-0.10088754984616,0.582985119528237,-0.54179249399851,-0.126181584545142,-1.17144275084267,1.22799530043366,1.02759227419178,0.0435431570191354,1.39713001034131,0.0523935324831789,-2.79509181046727 +1.08381292540755,-0.427136533272075,-0.170942066278103,0.0121986142083453,-0.855594212894735,0.374782547978267,-0.900288279617532,0.432458307138193,-0.251703494671416,-0.160596658268209,-0.519857790505829,1.47209547771623,0.896312640389972,0.270633487240641,-1.03189013777765,-1.8447882877246,0.603736085055394,-0.155348604773572,-1.72283805091332,-0.321862214120637,2.94334792656806 +-0.0605714464506548,-1.8436328566779,0.956836734847006,0.385113466053637,-0.13535429815988,0.0803886528977342,-0.21398073047476,0.314285989873926,-0.777569390361644,-0.307173878271144,0.448151930428544,-1.46584123421769,-1.39414661981901,-2.23659507105305,-0.166628736687892,1.20070648097599,1.18955153158735,-0.691325550636565,0.807707803193653,-0.0426903550803495,4.029735182272 +1.95686072541338,1.9564673689393,0.266840348255496,-0.757979250691139,-1.43763959687218,-1.45793309531131,1.21996527303471,2.04965272076458,-0.853130221282545,-0.286770131895987,1.87903546264481,0.375539814809844,-0.602964703915958,0.953203333975297,-0.911206916412832,0.492707231420853,1.78695425220146,-0.691739672092991,1.29922243691453,-1.27857876155367,4.78821270596234 
+0.208764430301538,-1.60420558941809,-1.48506506787287,0.330363405583337,-0.757981746890006,0.402462197670179,0.0820539605995427,-0.0439427997009636,0.00771479679183497,-0.211553902273085,-0.971649422093961,0.363845917979794,-0.913483132010666,0.252248251868974,0.770736470387369,0.804113273140514,0.278254779501152,-0.263958435511815,-0.710256861039492,-0.753790811936257,-0.104873704923682 +-0.0609104851063286,-0.401063558898297,-2.63061483849689,0.994835574153596,0.0577673555362238,0.526870785079879,-0.724649593374088,-1.89843742670639,1.43906184818053,0.458919019798053,-2.19429571307065,0.0407453925874218,-0.964417935868955,0.699920325146655,-0.213364431876113,-1.43168740893658,0.155268976826211,1.18143286802386,0.529007809899343,-0.131507031316693,-3.37382560163273 +0.0479377946881908,-0.68238312519775,-0.982051571159994,0.809937657901133,-0.0845125988038,-1.43037037325781,0.221497338429761,1.20528272187985,0.584147819319417,-0.541829128442428,-1.11394770101168,-1.02142640904968,-0.658586994213721,-0.987250778465459,-0.626973935198293,-0.816080620105349,0.435592367027571,-0.729584905995168,1.28163636862116,0.174749647297636,0.833041405563335 +1.59534777847875,0.141249591167048,-1.39275532421938,-0.326564444113002,0.353760511957998,2.26355513319988,0.322117901632102,-0.580763046176868,-1.26265094212537,-1.41797086428641,-0.874133363052523,-0.271916423196484,1.15852680109897,-0.701452973559545,-0.923879773713501,1.87087208636581,-0.37008986327386,-0.92706918759142,-0.47910728488318,-1.08540881340541,3.16275310128282 +1.55003623629665,0.629237114227703,-0.494874994852019,1.35002400079176,0.0748046538315752,0.47055919051463,0.26182714172689,-0.56741237545119,0.106232374766145,-0.895502519165606,0.966423863089819,-1.21908308251375,0.180635704605299,-2.25236662284165,0.24087419865652,-0.349067399378507,2.25831486618014,-0.601087684391727,-0.539950013921897,-0.175164022242605,2.76915437657991 
+0.739950548825545,0.422671491842258,0.282565724320462,0.0252540227606795,-0.96639589213612,1.18681907740826,1.24646734686802,-0.107357892847182,0.301307843882229,-1.48529739898143,0.440267419756453,0.116173765459116,0.14268927298613,-1.73506800926756,-0.572629233942057,-2.22929289258803,0.264110706897241,0.632982976516442,1.07582875416584,2.517910884722,3.24969089146869 +-0.152899450558034,-0.399889031431797,0.229180293599742,-0.100207211614467,-1.91423344376341,1.2822104509804,0.936191137802847,0.665566466318913,0.730533098171168,-1.30401961821487,-0.903732590797126,0.863896448858506,-0.438423962808966,-1.54349729485192,-0.83801857139615,0.602399620267423,-1.9424813131152,0.841712237120029,1.452735245724,-1.05739605694814,5.38435576611305 +-0.480774640286938,-0.112832261955056,-1.57723235116823,1.63468356861177,-0.447509742417772,-1.33595628725721,-0.835785454915303,-0.667029077616192,-0.38133295661875,-0.0674813375401042,0.143742045071371,-0.0596658969927958,0.991860828431931,1.92887611703292,-0.387401261452768,0.51077547334481,-0.38696093783878,2.07892287950386,-1.49894366635237,-0.714815940265108,-3.01579713203092 +-0.446845696668647,-0.378193234473879,0.223883167812648,0.120817272320312,-0.660011985619802,0.320869755188978,0.225268378336963,-1.26140039127561,-0.419284088778079,1.46242302336873,0.55407869055599,-0.123203549454958,0.589071519058694,-0.770343307176377,-0.632393091186424,0.731109690248918,-0.2307034376377,0.632792481010738,0.375775891100876,1.50651180130862,0.962725207944861 +0.220523578918624,1.46000352817275,1.47488389451548,-1.03079717257127,-0.820086440810247,-1.08546204350677,-1.96322277876661,0.611056936945936,-0.0856907116869529,2.03067038516693,0.260953490782883,-0.134315769216459,0.122580335186365,-0.811322709864129,-0.0981091428200048,0.567171035132151,0.166512219612771,0.534832273221319,-2.83996473742519,1.31842654737676,1.71212835244446 
+-0.186847564102154,-0.855846236883277,-0.504343442548662,1.31873911001313,0.568716110600615,-0.590352435858134,1.67345843146631,-0.974275977378361,-0.305988482395025,-0.629962375451263,-0.0389818314987491,0.328961996563533,-0.892732464862759,0.6314523081906,-1.01299190932119,-0.84681875222135,2.2812776457863,-0.396769805722602,-0.17010997686475,0.362384432116595,-1.74059882606484 +-0.462719465365117,0.763261761639426,1.09868866638728,-0.752542040515773,-1.56333813136539,-0.0760202002987871,0.103700183222908,0.697780116264688,-0.172221990870649,-0.0460944717402586,0.387514980021676,1.09087429155335,0.266780688732293,-2.42555530386586,0.292953895640668,0.610745275159585,-0.665292348036788,-0.493942056655912,-0.513470075355577,0.464806439971161,4.21613429450899 +1.12932934860095,-0.47761278295517,-1.1841385447812,1.94495032855684,0.39761585102727,1.08986728638742,-1.84881072938261,-0.314679761285995,0.368000611750672,-0.608782330431485,-1.41790479132597,-0.0511353068409533,-0.860924556863242,1.8269647720128,-1.03464830985379,0.893981545521465,-2.01322205748842,-0.373639048614075,-0.818226244514762,-0.986929892596752,-0.448859761238955 +0.650467832700602,1.50628757802239,0.559220892854754,0.646622729302395,-0.241184338701108,1.21390119587904,1.44749024051339,0.726975261227751,0.690622306515231,-0.304273708143507,-1.91616898665167,-1.28343345334855,1.95274799094355,-0.461280703364826,1.28867077793202,-0.803591807800544,-0.0644349154294264,0.156776107384916,0.146685517902326,0.120892728460919,3.93492646058891 +0.0218855205408524,1.15302850034267,0.58975756160086,0.52284758284033,-0.151662796423953,2.10951324055962,1.13479427222544,0.347758103616198,0.416561455159609,1.46460242255874,-0.829827658088243,-0.874574026206652,-0.70661211565925,1.11108228383911,-0.620572293225716,-1.44596616685657,0.837998638377407,-1.18555134670787,-0.612401604568894,-0.570309669021958,1.27463240360548 
+1.43074970968068,-0.462380578111351,-0.763337847944802,1.21855415777689,-0.144975858137624,-0.586251966295829,0.28386987848446,-0.58011506051046,0.469559078774248,0.716403085921291,-0.0279452358994628,0.254920192229204,1.21488143326183,1.46740959485364,0.382228220448773,0.0469444276478085,0.992109199368469,-0.0919924806036624,1.2116282966251,0.15972386499091,0.279513889137621 +-0.471389066256887,2.22878632962026,1.43005861029535,-0.422965944418844,-0.705360536321777,1.82212361523061,-0.759904458804462,-0.591647249593223,0.406522750741025,-0.848076997340906,-0.327572484111923,-1.64451290932415,-0.553823396169657,-0.366153708906292,0.0102402890366879,-0.923290911522614,0.809017478001353,0.440118803348495,-0.323277727206965,0.943447315849176,-0.0811827839685548 +-0.198454186147328,-0.381236860145321,1.28403731316374,0.160727455982395,-0.134558966072704,-0.624903111092264,-1.70906581421634,0.60157834057088,2.16546542855883,-1.23710286924041,-1.08717895341046,1.03865764439273,0.900800508142745,-1.97306945893888,-0.0578819760558766,-0.198237472100714,-0.48991211843091,1.09077914236216,-1.52135985963298,0.503596553412721,1.46993802668759 +0.754614363216477,-0.270479393843073,1.25003602751278,-1.34504261308841,1.8198094476092,-0.293567650576978,0.207490682643208,-1.33538498792914,-1.83171084578747,0.960715166671445,-0.379947607720597,-1.61271540269747,-0.0915904334223011,-0.688811776827638,0.797543281333536,-0.0281186102252473,0.359415571004924,0.594249646523093,0.40855641564504,0.101560015614577,0.351257459221261 +1.77001712581763,-0.476692102601824,-1.15946095675912,0.153178089037739,-0.0326154692711829,0.183572935624985,-0.328566171118381,1.48877461569195,-2.88985029391954,1.05355777943548,1.92317456319887,-0.696825197066012,-0.633913061869664,-0.0346541365280485,0.523928593350026,-0.764725256419012,-0.193973882580795,0.721921536271782,0.189430787181758,-1.77060930823652,5.89639515900785 
+-0.412708670725309,0.710096471321059,1.46622885223942,-2.05928119882731,-0.991475233473189,-0.364905607304493,0.497721102312757,0.669703782949911,-0.110366028591796,0.265806856409026,-1.1319759480912,1.40546377293452,0.670102959534624,-0.995274357731124,-1.31901287101029,0.418978493743138,-0.261660395014126,-0.606789447561743,0.384968520931677,0.816387804668174,1.3728698587127 +-0.343650329344964,1.08514782594814,-0.24648879355453,-0.561056575865195,0.77017722849689,0.0436387335423728,-1.20531053711128,-0.922649578584811,-1.77813451748759,1.1869195116462,-1.75159636019773,-2.44166886753447,-0.653727201778782,0.18069128546461,0.934842203459043,-0.524279210133209,-0.717716366430477,-1.99705894225146,1.29938561139974,-0.520141792344796,-2.1561879216987 +0.190204200780166,-0.578532930054981,-0.369463126463117,-1.37064497594725,0.381789438960944,1.55467484157753,-1.08415932825212,0.215303725587503,0.495530064531812,0.809236966369318,-1.17189939016352,-0.202844083449005,1.1534893212714,0.390595012762367,1.01509354698934,-0.463230000247669,-0.377570915874488,0.0469504606786842,1.48371375127759,0.681886194500108,-0.67755588651167 +0.724347413109047,0.433293370290925,0.673370133019715,-0.712811662053238,-1.23334646986375,0.869269552133972,1.33017513063763,-0.414666583383377,-1.03658634181582,-0.444857470942617,-1.40507464254096,0.194147576294516,0.29187328081143,-1.73841661033819,-0.338788705833753,0.537127068473183,0.224913904243461,-0.445706812788214,0.456009804777668,0.774273842197533,1.93778117572862 +0.966058846776072,0.027276673006142,0.323330020252102,-1.79326646908198,0.238000171612873,-1.15199968744041,0.66483251263882,0.730004145055638,1.25479912929786,1.63019039553793,0.805073819158168,-1.1529309048897,-0.21921411821452,0.275891098879921,-0.403880207827811,-0.0210787197748874,-1.48430434834444,-1.10612530482135,0.961387080753616,0.177920153566045,1.13990744134656 
+0.651637948184663,0.206434190313796,-0.126959356669887,0.935772023180675,0.0927012266929792,0.276343680593951,-1.8072720137558,-0.737222157643903,0.433853396981645,0.544012637404257,2.28308041695592,1.50489990746925,1.68032193914457,0.680235098888798,-1.216017870454,0.592282275537646,0.800688030629124,-0.801143431550209,-0.350010649757495,2.28071859147998,-1.98512729697249 +0.435429144925779,-0.299007656505102,-0.250903189985598,-0.582734576518564,-0.539853088995285,-0.558257517757211,0.514520971894665,-1.05314134458594,0.223700062531479,-0.0866507992943054,1.48187381654059,0.637855961337285,-1.1995701774818,-1.24718557397563,1.40586286233407,0.713648147804437,1.0625687317329,0.11753922707959,-0.722714091467735,-0.285513677367012,2.50814660131822 +2.32600198189837,1.0542053321619,-0.418054601602944,-0.119737306311983,-1.75180288889132,0.874962413255575,0.744103905346723,-0.610799312664182,0.525031905470231,0.842607894147583,0.048292211676346,0.657279233215879,0.0140849948083788,-0.372801081790932,0.968787979760728,1.51460366906594,-0.969846296289096,1.33476916206959,0.211949929411825,-0.230892225792263,6.11767177283067 +-1.31002034154393,-0.312330273121048,-1.24024992474358,0.116827303560313,0.718685275228086,-0.0895531533773457,-0.3067132212293,1.89453347442353,0.0740623831049782,0.173957190303947,-0.240773400173152,-0.874205261945932,-2.09492544791915,-1.31104693071806,1.16561104503259,1.20423656623622,0.331001413869602,-1.72719811134217,0.741545023497531,1.10226205620032,-1.36087870217716 +-1.12367538387488,0.939051041516592,0.724836279423398,0.217200935721648,-0.551107755383666,0.65423013951124,-0.523152402617118,0.505933535238856,-0.222922233188522,-0.108198451857403,-1.71014188995533,1.39094873297985,-0.517616546140124,-0.971438401798193,-0.496169448862825,1.66396430257756,0.592356141146787,0.0541886376851136,-0.0638944313788845,0.560647167442103,1.4944802149898 
+0.771704957344734,0.434157761345277,0.255868927855839,-0.52080938165204,1.56654529712412,-0.900719451849877,0.549519193810983,0.878177306772462,-1.25279264862269,-1.01338725561181,-1.15993658033688,-0.483847207539026,-0.395605214500926,1.52293135516511,0.181465914106846,-0.677237527402621,-0.174162202884863,-1.21182107059632,-0.0585879850437506,-2.34577712407818,0.22007889586201 +0.418358389022197,0.0506749701108666,-0.436535548570126,0.0106155793199871,0.392477999503498,0.0525297977518677,0.0698927074812599,0.0136206960491372,-0.50275298207833,1.4602651057921,-0.861571183178397,0.0528173684073667,0.710660011825569,-1.69583400115393,0.696117062279713,0.74904425010556,-0.61630267527187,0.0456401270990335,1.32244150732308,-1.54629008569164,3.34452839738744 +1.27051399900169,1.51940299647922,-1.01737906529687,0.172793432958818,0.546202734409706,-0.911391579442917,0.515692859903676,0.276669696611399,0.398244924904535,-0.287943775992369,0.418321373027919,-0.59051271240142,-0.0314321581184302,0.555503812202553,0.834870692771924,0.247726019962469,0.129811001059563,0.5983572416325,-0.07199468395536,1.18598278663541,-1.51870398140463 +-1.48578494370753,-1.31654848718166,1.63568979958702,1.40698469507377,-0.96541429284115,-0.653326744472561,0.437991772891555,-0.700594371819923,-1.45182975238439,-0.217272364796798,-0.856443702555918,0.0852591620066388,1.24966876111158,0.0591764293592248,0.30538057796962,-0.240658488383928,-1.43063867013548,-1.43537734738398,-0.187845553286668,-0.972855648431218,0.706354768439591 +1.41464281477416,-0.928119476374438,-0.399631321850464,0.213634833794199,-1.29620475947076,0.59467825944211,0.0234050809675681,0.59126770807768,0.0498184454057414,-1.24917134928906,1.36220014769426,0.0302556182698352,-0.140572856058455,1.05443287382077,-0.412124260946478,-0.248744857062485,0.188519087348553,-0.184126662670868,1.73370161943726,0.13558985454485,1.8930040986949 
+1.31179445198912,1.93427466661522,1.5764603031343,-1.12708711255841,0.929573444350472,-1.5172805947025,0.949568044663512,-0.135062914639728,-0.472571060744177,1.65279009386003,-0.344270412934531,0.177415215607209,-1.62162222075161,2.33006528646549,-0.43171565843341,-0.296671089316597,0.333609631748338,1.56140935174449,0.523855490161075,0.0456313142369957,-1.19202558000887 +0.63967503939376,1.00262066325392,-0.470802375370779,-0.553156665672183,-0.783199874578241,1.19044910942198,-2.13499711625258,-0.593998280457172,1.98557575808538,-1.43859924547601,0.16471321774271,-1.99929106214103,-0.98348225010805,-0.613489303838668,-0.00650415473385969,0.444186771229829,-1.66950588370666,-1.16634073107085,1.65511885900227,-0.221137874296958,2.11070841782041 +1.1373121720121,0.779824116369828,-0.12477807360127,0.726133114564483,-0.862750742567102,0.451425762836254,0.119129167048202,-1.12514476023892,-0.70267078790048,0.219153888535902,-1.07909155262735,0.14742351787082,0.0399894028890636,-1.23751612379947,2.00063974698035,-0.0523413494233804,0.188962103157533,-0.0866603135550793,-0.784796233472089,0.683410534971045,2.48075147379848 +0.223052827646988,-0.750934716488311,0.549575110435376,0.405817416942179,0.0411157553314881,0.236369397836289,-0.783249455230407,-3.63672760565443,-1.22922647821416,2.28503426771012,-1.66896276978717,2.3416197957324,-0.846978835387351,2.09900014128664,-1.54003843013085,-0.175545795793322,-1.16686073812134,-1.31287143735196,-1.43523737802431,1.00245370162821,-4.74435304094856 +-1.68970195757865,0.012508902924978,0.860274242994504,-0.564131455992466,-0.102142008526685,0.946458933830986,1.19114299479007,0.0972057739514852,0.20120210571319,0.328277314436196,-0.520564317703519,1.62075943034094,1.14974902464443,1.23480482891746,2.25017854723498,0.954588160175011,-0.52839051596008,-1.46708525655908,0.903802089344198,-0.599269900098586,-2.12334157814763 
+-0.732033887214069,0.132661337789623,1.61038579184646,-0.0646362878503381,1.42996031129886,-2.18116209817493,-0.0840629310591911,-0.302294655534019,-1.73868365732235,-1.65110012634123,-0.560070232949305,1.3560185284608,2.75605763290248,-1.1899198390782,-0.813771237829753,-1.11074564545941,0.154404895269282,-0.746866033089674,1.80880714916578,-0.059224334624038,-1.38994117005565 +-0.564562740990852,-0.330606311956591,-0.0640129014099094,-0.665518953931539,0.498427543797971,0.153216810773414,-0.711691098851467,-1.12637099767303,-0.578124105487692,1.00128832519083,1.14604082133429,-1.34870865907357,0.533730684074568,0.381880299082026,0.0488781209076191,-0.299448854888301,-1.03274419068093,-0.78983196465213,1.00242384170559,0.480465331497851,-3.96469665629743 +0.990530117250574,-0.781913297678883,-0.0206933510124791,-0.607005918550468,1.42915934345331,-1.30722769190645,0.616589996933067,-0.799011044914765,-0.804836717319897,0.274559762285885,0.350204488020162,-1.04770202243292,0.354765073595398,-0.875474887586532,1.54796593565191,-0.990836431918816,-1.32345230426618,0.256433709091836,-0.076270307165032,-0.393548510957033,1.20492092634377 +-0.577303960841578,-0.852462875022879,0.365727792337694,-1.06860491577873,1.09601779909389,-2.78802833056883,0.88366364210643,-1.38647652472276,0.477753822623214,0.505988852592847,-1.02560501030334,-0.763058986373484,0.0299316984988655,1.13584106965825,0.391126050135707,-0.534296020319409,-1.10644631338125,-1.45909056464581,0.0172444039990005,0.613780858215174,-6.8574850361641 +-1.6318284851982,-0.0935940630963397,-0.799562937630869,1.10015579056397,0.657608244908476,1.32245398111943,0.058805106576132,0.00469764460688375,-0.0142499000354734,0.483364655516024,0.47455914221578,-0.194074417367148,-0.544403215327899,-0.908923003836084,0.755647704515933,0.222131636643983,-0.948918835115333,-0.0799950968541162,1.64070823805464,0.65034087831353,-3.01411434442159 
+-0.59332408923295,-0.465276171085573,0.960478439925254,1.52945034510291,1.81446422501241,-0.342492022251219,-1.12542389134899,-0.149205825367921,-0.805910158324905,-0.559082401761724,1.03111528432607,-0.733304168128845,-2.71954418752116,0.214427995757602,0.354372411311667,0.393252520957273,-1.23861105946727,-0.593093698418677,-1.45618139992905,0.900698165351196,-3.21329756529944 +1.5054799833425,-0.668687339454584,1.26371195036875,-0.406897374518751,-0.954262172785985,2.27246524102193,-1.38869644341981,0.619171582255533,0.23488917045977,-0.296662934661662,0.942890236215858,1.06159971647822,0.64849141598123,0.122469244111175,-0.676304369624014,-0.390561231096804,0.0541215534605907,-3.43253861349413,-3.19442171328131,0.107658159384497,4.50173294472998 +-0.484087253217202,-0.615728458187521,-0.0291957743353218,0.849463103082336,-0.748716328839608,-1.18975963722832,-0.671887318438369,-0.919862034177609,0.0557187790475029,-1.18202423747701,-1.09033155981261,0.827017868707679,-0.100496384695953,-1.65627074553877,0.356511817099089,0.269935278342628,-1.15260336299003,0.473108715340719,0.643635049728466,1.74630324524807,-3.31892875672626 diff --git a/flake.nix b/flake.nix index ad6c571..67ac805 100644 --- a/flake.nix +++ b/flake.nix @@ -37,6 +37,12 @@ typos cmdstan julia + + # LaTeX part + coreutils + tex + gnuplot + biber ]; shellHook = '' @@ -60,7 +66,7 @@ export PATH="${pkgs.lib.makeBinPath buildInputs}"; cd slides export HOME=$(pwd) - latexmk -pdflatex -shell-escape slides.tex + latexmk -pdflatex -shell-escape -interaction=nonstopmode slides.tex ''; installPhase = '' mkdir -p $out diff --git a/slides/.gitignore b/slides/.gitignore index de4cf22..850d2fd 100644 --- a/slides/.gitignore +++ b/slides/.gitignore @@ -1,5 +1,6 @@ tikz/* -!tikz/placeholder +!tikz/.gitkeep *.dat *.script *.table +*.pdf diff --git a/slides/.latexmkrc b/slides/.latexmkrc index 93813d3..17232d9 100644 --- a/slides/.latexmkrc +++ b/slides/.latexmkrc @@ -1 +1,3 @@ -$pdflatex='pdflatex -shell-escape 
-write18'; \ No newline at end of file +$latex = 'latex -interaction=nonstopmode -shell-escape'; +$pdflatex = 'pdflatex -interaction=nonstopmode -shell-escape'; +$clean_ext = "bbl nav out snm dat table script vrb dvi auxlock"; diff --git a/slides/10-Sparse_Regression.tex b/slides/10-Sparse_Regression.tex new file mode 100644 index 0000000..41d2616 --- /dev/null +++ b/slides/10-Sparse_Regression.tex @@ -0,0 +1,379 @@ +% !TeX root = slides.tex +\section{Sparse Regression} + +\subsection{Recommended References} +\begin{frame}{Recommended References} + \begin{vfilleditems} + \item \textcite{gelman2020regression} - Chapter 12, Section 12.8: Models for regression coefficients + \item Horseshoe Prior: \textcite{carvalho2009handling} + \item Horseshoe+ Prior: \textcite{bhadra2015horseshoe} + \item Regularized Horseshoe Prior: \textcite{piironen2017horseshoe} + \item R2-D2 Prior: \textcite{zhang2022bayesian} + \item Betancourt's Case study on Sparsity: \textcite{betancourtSparsityBlues2021} + \end{vfilleditems} +\end{frame} + +\subsection{Sparsity} +\begin{frame}{What is Sparsity?} + Sparsity is a concept frequently encountered in statistics, signal processing, + and machine learning, which refers to situations where the vast majority of + elements in a dataset or a vector are zero or close to zero. +\end{frame} + +\begin{frame}{How to Handle Sparsity?} + Almost all techniques deal with some sort of \textbf{variable selection}, + instead of altering data. + + This makes sense from a Bayesian perspective, + as data is \textbf{information}, + and we don't want to throw information away. +\end{frame} + +\subsection{Frequentist Approach} +\begin{frame}{Frequentist Approach} + The frequentist approach deals with sparse regression by staying in the + ``optimization'' context but adding \textbf{Lagrangian constraints}\footnote{ + this is called \textbf{LASSO} (least absolute shrinkage and selection operator) + from \textcite{tibshirani1996regression,zou2005regularization}. 
+ }: + + $$ + \min_\beta \left\{ \sum _{i=1}^N ( y_i - \alpha - x_i^T \boldsymbol{\beta} )^2 \right\} + $$ + + subject to $\vert \boldsymbol{\beta} \vert_p \leq t$. + + \vfill + + Here $\vert \cdot \vert_p$ is the $p$-norm. +\end{frame} + +\subsection{Variable Selection Techniques} +\begin{frame}{Variable Selection Techniques} + \begin{vfilleditems} + \item \textbf{discrete mixtures}: \textit{spike-and-slab} prior + \item \textbf{shrinkage priors}: \textit{Laplace} prior + and \textit{horseshoe} prior \parencite{carvalho2009handling} + \end{vfilleditems} +\end{frame} + +\subsection{Discrete Mixtures} +\subsubsection{Spike-and-Slab Prior} +\begin{frame}{Discrete Mixtures -- Spike-and-Slab Prior} + \small + Mixture of two distributions—one that is + concentrated at zero (the ``spike'') and one with a much wider spread (the + ``slab''). This prior indicates that we believe most coefficients in our model are + likely to be zero (or close to zero), but we allow the possibility that some are + not. 
+ + \vfill + Here is the Gaussian case: + + $$ + \begin{aligned} + \beta_i \mid \lambda_i, c &\sim \text{Normal} \left( 0, \sqrt{\lambda_i^2 c^2} \right) \\ + \lambda_i &\sim \text{Bernoulli} (p) + \end{aligned} + $$ + + where: + \begin{vfilleditems} + \small + \item $c$: \textit{slab} width + \item $p$: prior inclusion probability; + encodes the prior information about the sparsity of the + coefficient vector $\boldsymbol{\beta}$ + \item $\lambda_i \in \{0, 1\}$: whether the coefficient $\beta_i$ is close + to zero (comes from the ``spike'', $\lambda_i = 0$) or nonzero + (comes from the ``slab'', $\lambda_i = 1$) + \end{vfilleditems} +\end{frame} + +\begin{frame}{Discrete Mixtures -- Spike-and-Slab Prior} + \centering + \begin{tikzpicture} + \begin{axis}[every axis plot, line width=2pt, + ylabel=PDF, + domain=-4:4,samples=200, + axis x line*=bottom, % no box around the plot, only x and y axis + axis y line*=left, % the * suppresses the arrow tips + enlarge x limits=true, % extend the axes a bit + ] + + \addplot [blue] {hs(1,0.05)}; + \addlegendentry{spike: $c=1, \lambda=0$} + \addplot [red] {hs(1,1)}; + \addlegendentry{slab: $c=1, \lambda=1$} + \end{axis} + \end{tikzpicture} +\end{frame} + +\subsection{Shrinkage Priors} +\subsubsection{Laplace Prior} +\begin{frame}{Shrinkage Priors -- Laplace Prior} + The Laplace distribution is a continuous probability distribution named after + Pierre-Simon Laplace. + It is also known as the double exponential distribution. + + It has parameters: + \begin{vfilleditems} + \item $\mu$: location parameter + \item $b$: scale parameter + \end{vfilleditems} + + The PDF is: + + $$ + \text{Laplace}(\mu, b) = \frac{1}{2b} e^{-\frac{| x - \mu |}{b}} + $$ + + \vfill + + It is a symmetrical exponential decay around $\mu$ with scale governed by $b$. 
+\end{frame} + +\begin{frame}{Shrinkage Priors -- Laplace Prior} + \centering + \begin{tikzpicture} + \begin{axis}[every axis plot, line width=2pt, + ylabel=PDF, + domain=-4:4,samples=200, + axis x line*=bottom, % no box around the plot, only x and y axis + axis y line*=left, % the * suppresses the arrow tips + enlarge x limits=true, % extend the axes a bit + ] + + \addplot [blue] {laplace(1)}; + \addlegendentry{$\mu=0, b=1$} + \end{axis} + \end{tikzpicture} +\end{frame} + +\subsubsection{Horseshoe Prior} +\begin{frame}{Shrinkage Priors -- Horseshoe Prior} + The horseshoe prior \parencite{carvalho2009handling} assumes that each coefficient + $\beta_i$ is conditionally independent with density + $P_{\text{HS}}(\beta_i \mid \tau )$, where $P_{\text{HS}}$ + can be represented as a scale mixture of Gaussians: + + $$ + \begin{aligned} + \beta_i \mid \lambda_i, \tau &\sim \text{Normal} \left( 0, \sqrt{\lambda_i^2 \tau^2} \right) \\ + \lambda_i &\sim \text{Cauchy}^+ (0, 1) + \end{aligned} + $$ + + where: + \begin{vfilleditems} + \item $\tau$: \textit{global} shrinkage parameter + \item $\lambda_i$: \textit{local} shrinkage parameter + \item $\text{Cauchy}^+$ is the half-Cauchy distribution for the + standard deviation $\lambda_i$ + \end{vfilleditems} + + \vfill + + \small + Note that it is similar to the spike-and-slab, but the discrete mixture becomes + a ``continuous'' mixture with the $\text{Cauchy}^+$. 
+\end{frame} + +\begin{frame}{Shrinkage Priors -- Horseshoe} + \centering + \begin{tikzpicture} + \begin{axis}[every axis plot, line width=2pt, + ylabel=PDF, + domain=-4:4,samples=200, + axis x line*=bottom, % no box around the plot, only x and y axis + axis y line*=left, % the * suppresses the arrow tips + enlarge x limits=true, % extend the axes a bit + ] + + \addplot [blue] {hs(1,1)}; + \addlegendentry{$\tau=1, \lambda=1$} + \addplot [red] {hs(1,0.5)}; + \addlegendentry{$\tau=1, \lambda=0.5$} + \end{axis} + \end{tikzpicture} +\end{frame} + +\subsubsection{Discrete Mixtures versus Shrinkage Priors} +\begin{frame}{Discrete Mixtures versus Shrinkage Priors} + \textbf{Discrete mixtures} offer the correct representation of sparse problems + \parencite{carvalho2009handling} by placing positive prior probability on + $\beta_i = 0$ (regression coefficient), + but pose several difficulties: mostly computational due to the + \textbf{non-continuous nature}. + + \textbf{Shrinkage priors}, despite not having the best representation of sparsity, + can be very attractive computationally: again due to the \textbf{continuous property}. +\end{frame} + +\begin{frame}{Horseshoe versus Laplace} + The advantages of the Horseshoe prior over the Laplace prior are primarily: + + \begin{vfilleditems} + \small + \item \textbf{shrinkage}: + The Horseshoe prior has infinitely heavy tails and an infinite spike at zero. + Parameters estimated under the Horseshoe prior can be shrunken towards zero + more aggressively than under the Laplace prior, + promoting sparsity without sacrificing the ability to detect true non-zero signals. + \item \textbf{signal} detection: + Due to its heavy tails, + the Horseshoe prior does not overly penalize large values, + which allows significant effects to stand out even in the presence + of many small or zero effects. 
+ \item \textbf{uncertainty} quantification: + With its heavy-tailed nature, + the Horseshoe prior better captures uncertainty in parameter estimates, + especially when the truth is close to zero. + \item \textbf{regularization}: In high-dimensional settings where the number of + predictors can exceed the number of observations, + the Horseshoe prior acts as a strong regularizer, + automatically adapting to the underlying sparsity level without the + need for external tuning parameters. + \end{vfilleditems} +\end{frame} + +\begin{frame}{Effective Shrinkage Comparison} + It makes more sense to compare the shrinkage effects of the proposed approaches + so far. + Assume for now that $\sigma^2 = \tau^2 = 1$, + and define $\kappa_i = \frac{1}{1 + \lambda_i^2}$. + Then $\kappa_i$ is a random shrinkage coefficient, + and can be interpreted as the amount of weight that the posterior mean for + $\beta_i$ places on $0$ once the data $\textbf{y}$ have been observed: + + $$ + \operatorname{E}(\beta_i \mid y_i, \lambda_i^2) = + \left( \frac{\lambda_i^2}{1 + \lambda_i^2} \right) y_i + + \left( \frac{1}{1 + \lambda_i^2} \right) 0 = + (1 - \kappa_i) y_i + $$ +\end{frame} + +\begin{frame}{Effective Shrinkage Comparison\footnote{spike-and-slab with $p=\frac{1}{2}$ + would be very similar to Horseshoe but with discontinuities.}} + \centering + \begin{tikzpicture} + \begin{axis}[every axis plot, line width=2pt, + ylabel=PDF, + xlabel=$\kappa$, + domain=0.001:0.999,samples=200, + axis x line*=bottom, % no box around the plot, only x and y axis + axis y line*=left, % the * suppresses the arrow tips + enlarge x limits=true, % extend the axes a bit + ] + + % NOTE(review): legend labels swapped to match the plotted functions — + % beta(0.5,0.5) is the Horseshoe's shrinkage-weight density (Carvalho et al. 2009), + % shrinkagelaplace is the Laplace shrinkage profile; confirm function definitions in preamble. + \addplot [blue] {beta(0.5,0.5)}; + \addlegendentry{Horseshoe} + \addplot [red] {shrinkagelaplace(0.5)}; + \addlegendentry{Laplace} + \end{axis} + \end{tikzpicture} +\end{frame} + +\subsubsection{Horseshoe+ Prior} +\begin{frame}{Shrinkage Priors -- Horseshoe+} + Natural extension from the Horseshoe that has improved performance with + 
 highly sparse data \parencite{bhadra2015horseshoe}. + + \vfill + + Just introduce a new half-Cauchy mixing variable $\eta_i$ in the Horseshoe: + + $$ + \begin{aligned} + \beta_i \mid \lambda_i, \eta_i, \tau &\sim \text{Normal}(0, \lambda_i) \\ + \lambda_i \mid \eta_i, \tau &\sim \text{Cauchy}^+ (0, \tau \eta_i) \\ + \eta_i &\sim \text{Cauchy}^+ (0, 1) + \end{aligned} + $$ + + where: + \begin{vfilleditems} + \item $\tau$: \textit{global} shrinkage parameter + \item $\lambda_i$: \textit{local} shrinkage parameter + \item $\eta_i$: additional \textit{local} shrinkage parameter + \item $\text{Cauchy}^+$ is the half-Cauchy distribution for the + standard deviation $\lambda_i$ and $\eta_i$ + \end{vfilleditems} +\end{frame} + +\subsubsection{Regularized Horseshoe Prior} +\begin{frame}{Shrinkage Priors -- Regularized Horseshoe} + The Horseshoe and Horseshoe+ guarantee that the strong signals will not + be overshrunk. + However, this property can also be harmful, + especially when the parameters are weakly identified. + + \vfill + + The solution, Regularized Horseshoe \parencite{piironen2017horseshoe} + (also known as the ``Finnish Horseshoe''), + is able to control the amount of shrinkage for the largest coefficient. 
+\end{frame} + +\begin{frame}{Shrinkage Priors -- Regularized Horseshoe} + $$ + \begin{aligned} + \beta_i \mid \lambda_i, \tau, c &\sim \text{Normal} \left( 0, \sqrt{\tau^2 \tilde{\lambda_i}^2} \right) \\ + \tilde{\lambda_i}^2 &= \frac{c^2 \lambda_i^2}{c^2 + \tau^2 \lambda_i^2} \\ + \lambda_i &\sim \text{Cauchy}^+ (0, 1) + \end{aligned} + $$ + + where: + \begin{vfilleditems} + \small + \item $\tau$: \textit{global} shrinkage parameter + \item $\lambda_i$: \textit{local} shrinkage parameter + \item $c > 0$: regularization constant + \item $\text{Cauchy}^+$ is the half-Cauchy distribution for the + standard deviation $\lambda_i$ + \end{vfilleditems} + \small + Note that when $\tau^2 \lambda_i^2 \ll c^2$ (coefficient $\beta_i \approx 0$), + then $\tilde{\lambda_i}^2 \to \lambda_i^2$; + and when $\tau^2 \lambda_i^2 \gg c^2$ (coefficient $\beta_i$ far from $0$), + then $\tilde{\lambda_i}^2 \to \frac{c^2}{\tau^2}$ and $\beta_i$ prior + approaches $\text{Normal}(0,c)$. +\end{frame} + +\subsubsection{R2-D2 Prior} +\begin{frame}{Shrinkage Priors -- R2-D2} + Still, we can do better. + The \textbf{R2-D2}\footnote{ + $R^2$-induced Dirichlet Decomposition + } prior \parencite{zhang2022bayesian} has heavier tails and + higher concentration around zero than the previous approaches. + + The idea is to, instead of specifying a prior on $\boldsymbol{\beta}$, + construct a prior on the coefficient of determination $R^2$\footnote{ + square of the correlation coefficient between the dependent variable + and its modeled expectation.}. + Then we use that prior to ``distribute'' throughout the $\boldsymbol{\beta}$. 
+\end{frame} + +\begin{frame}{Shinkrage Priors -- R2-D2} + $$ + \begin{aligned} + R^2 &\sim \text{Beta} \left( \mu_{R^2} \sigma_{R^2}, (1 - \mu_{R^2}) \sigma_{R^2} \right) \\ + \boldsymbol{\phi} &\sim \text{Dirichlet}(J, 1) \\ + \tau^2 &= \frac{R^2}{1 - R^2} \\ + \boldsymbol{\beta} &= Z \cdot \sqrt{\boldsymbol{\phi} \tau^2} + \end{aligned} + $$ + where: + \begin{vfilleditems} + \small + \item $\tau$: \textit{global} shrinkage parameter + \item $\boldsymbol{\phi}$: proportion of total variance allocated to each covariate, + can be interpreted as the \textit{local} shrinkage parameter + \item $\mu_{R^2}$ is the mean of the $R^2$ parameter, generally $\frac{1}{2}$ + \item $\sigma_{R^2}$ is the precision of the $R^2$ parameter, generally $2$ + \item $Z$ is the standard Gaussian, i.e. $\text{Normal}(0, 1)$ + \end{vfilleditems} +\end{frame} diff --git a/slides/10-Hierarchical_Models.tex b/slides/11-Hierarchical_Models.tex similarity index 100% rename from slides/10-Hierarchical_Models.tex rename to slides/11-Hierarchical_Models.tex diff --git a/slides/11-MCMC.tex b/slides/12-MCMC.tex similarity index 100% rename from slides/11-MCMC.tex rename to slides/12-MCMC.tex diff --git a/slides/12-Model_Comparison.tex b/slides/13-Model_Comparison.tex similarity index 100% rename from slides/12-Model_Comparison.tex rename to slides/13-Model_Comparison.tex diff --git a/slides/flake.lock b/slides/flake.lock deleted file mode 100644 index 9d6929b..0000000 --- a/slides/flake.lock +++ /dev/null @@ -1,186 +0,0 @@ -{ - "nodes": { - "flake-compat": { - "flake": false, - "locked": { - "lastModified": 1673956053, - "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", - "owner": "edolstra", - "repo": "flake-compat", - "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", - "type": "github" - }, - "original": { - "owner": "edolstra", - "repo": "flake-compat", - "type": "github" - } - }, - "flake-utils": { - "inputs": { - "systems": "systems" - }, - "locked": { - "lastModified": 
1694529238, - "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "ff7b65b44d01cf9ba6a71320833626af21126384", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, - "flake-utils_2": { - "inputs": { - "systems": "systems_2" - }, - "locked": { - "lastModified": 1685518550, - "narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, - "gitignore": { - "inputs": { - "nixpkgs": [ - "pre-commit-hooks", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1660459072, - "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", - "owner": "hercules-ci", - "repo": "gitignore.nix", - "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73", - "type": "github" - }, - "original": { - "owner": "hercules-ci", - "repo": "gitignore.nix", - "type": "github" - } - }, - "nixpkgs": { - "locked": { - "lastModified": 1699186365, - "narHash": "sha256-Pxrw5U8mBsL3NlrJ6q1KK1crzvSUcdfwb9083sKDrcU=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "a0b3b06b7a82c965ae0bb1d59f6e386fe755001d", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixpkgs-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs-stable": { - "locked": { - "lastModified": 1685801374, - "narHash": "sha256-otaSUoFEMM+LjBI1XL/xGB5ao6IwnZOXc47qhIgJe8U=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "c37ca420157f4abc31e26f436c1145f8951ff373", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixos-23.05", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs_2": { - "locked": { - "lastModified": 1689261696, - "narHash": "sha256-LzfUtFs9MQRvIoQ3MfgSuipBVMXslMPH/vZ+nM40LkA=", - "owner": "NixOS", - "repo": 
"nixpkgs", - "rev": "df1eee2aa65052a18121ed4971081576b25d6b5c", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixpkgs-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "pre-commit-hooks": { - "inputs": { - "flake-compat": "flake-compat", - "flake-utils": "flake-utils_2", - "gitignore": "gitignore", - "nixpkgs": "nixpkgs_2", - "nixpkgs-stable": "nixpkgs-stable" - }, - "locked": { - "lastModified": 1700922917, - "narHash": "sha256-ej2fch/T584b5K9sk1UhmZF7W6wEfDHuoUYpFN8dtvM=", - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "rev": "e5ee5c5f3844550c01d2131096c7271cec5e9b78", - "type": "github" - }, - "original": { - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "type": "github" - } - }, - "root": { - "inputs": { - "flake-utils": "flake-utils", - "nixpkgs": "nixpkgs", - "pre-commit-hooks": "pre-commit-hooks" - } - }, - "systems": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - }, - "systems_2": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - } - }, - "root": "root", - "version": 7 -} diff --git a/slides/flake.nix b/slides/flake.nix deleted file mode 100644 index 010720a..0000000 --- a/slides/flake.nix +++ /dev/null @@ -1,61 +0,0 @@ -{ - description = "A basic flake with a shell"; - inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; - inputs.flake-utils.url = "github:numtide/flake-utils"; - inputs.pre-commit-hooks.url = "github:cachix/pre-commit-hooks.nix"; - - 
outputs = { self, nixpkgs, flake-utils, pre-commit-hooks }: - flake-utils.lib.eachDefaultSystem (system: - let - pkgs = nixpkgs.legacyPackages.${system}; - tex = pkgs.texlive.combine { - inherit (pkgs.texlive) scheme-small; - inherit (pkgs.texlive) latexmk pgf pgfplots tikzsymbols biblatex beamer; - inherit (pkgs.texlive) silence appendixnumberbeamer fira fontaxes mwe; - inherit (pkgs.texlive) noto csquotes babel helvetic transparent; - inherit (pkgs.texlive) xpatch hyphenat wasysym algorithm2e listings; - inherit (pkgs.texlive) lstbayes ulem subfigure ifoddpage relsize; - inherit (pkgs.texlive) adjustbox media9 ocgx2 biblatex-apa wasy; - }; - in - { - checks = { - pre-commit-check = pre-commit-hooks.lib.${system}.run { - src = ./.; - hooks = { - typos.enable = true; - }; - }; - }; - - devShells.default = pkgs.mkShell { - packages = with pkgs;[ - bashInteractive - # pdfpc # FIXME: broken on darwin - typos - ]; - - inherit (self.checks.${system}.pre-commit-check) shellHook; - }; - packages.default = pkgs.stdenvNoCC.mkDerivation rec { - name = "slides"; - src = self; - buildInputs = with pkgs; [ - coreutils - tex - gnuplot - biber - ]; - phases = [ "unpackPhase" "buildPhase" "installPhase" ]; - buildPhase = '' - export PATH="${pkgs.lib.makeBinPath buildInputs}"; - export HOME=$(pwd) - latexmk -pdflatex -shell-escape slides.tex - ''; - installPhase = '' - mkdir -p $out - cp slides.pdf $out/ - ''; - }; - }); -} diff --git a/slides/references.bib b/slides/references.bib index 82ab823..f2271c0 100644 --- a/slides/references.bib +++ b/slides/references.bib @@ -1514,3 +1514,82 @@ @misc{MixedModels year = {2022}, month = {Jul}, } +@inproceedings{carvalho2009handling, + title = {Handling sparsity via the horseshoe}, + author = {Carvalho, Carlos M and Polson, Nicholas G and Scott, James G}, + booktitle = {Artificial intelligence and statistics}, + pages = {73--80}, + year = {2009}, + organization = {PMLR} +} +@misc{bhadra2015horseshoe, + title = {The Horseshoe+ Estimator of 
Ultra-Sparse Signals}, + author = {Anindya Bhadra and Jyotishka Datta and Nicholas G. Polson and Brandon Willard}, + year = {2015}, + eprint = {1502.00560}, + archivePrefix = {arXiv}, + primaryClass = {math.ST} +} +@article{piironen2017horseshoe, + author = {Juho Piironen and Aki Vehtari}, + title = {{Sparsity information and regularization in the horseshoe and other shrinkage priors}}, + volume = {11}, + journal = {Electronic Journal of Statistics}, + number = {2}, + publisher = {Institute of Mathematical Statistics and Bernoulli Society}, + pages = {5018 -- 5051}, + keywords = {Bayesian inference, horseshoe prior, shrinkage priors, Sparse estimation}, + year = {2017}, + doi = {10.1214/17-EJS1337SI}, + URL = {https://doi.org/10.1214/17-EJS1337SI} +} +@article{zhang2022bayesian, + title = {Bayesian regression using a prior on the model fit: The r2-d2 shrinkage prior}, + author = {Zhang, Yan Dora and Naughton, Brian P and Bondell, Howard D and Reich, Brian J}, + journal = {Journal of the American Statistical Association}, + volume = {117}, + number = {538}, + pages = {862--874}, + year = {2022}, + publisher = {Taylor \& Francis} +} +@online{betancourtSparsityBlues2021, + title = {Sparsity Blues}, + url = { + https://betanalpha.github.io/assets/case_studies/modeling_sparsity.html + }, + titleaddon = {Beta \& Alpha}, + author = {Betancourt, Michael}, + urldate = {2023-12-09}, + date = {2021-07}, +} +@article{tibshirani1996regression, + title = {Regression shrinkage and selection via the lasso}, + author = {Tibshirani, Robert}, + journal = {Journal of the Royal Statistical Society Series B: Statistical Methodology}, + volume = {58}, + number = {1}, + pages = {267--288}, + year = {1996}, + publisher = {Oxford University Press} +} +@article{zou2005regularization, + title = {Regularization and variable selection via the elastic net}, + author = {Zou, Hui and Hastie, Trevor}, + journal = {Journal of the Royal Statistical Society Series B: Statistical Methodology}, + volume 
= {67}, + number = {2}, + pages = {301--320}, + year = {2005}, + publisher = {Oxford University Press} +} + @article{glmnet, + title = {Elastic Net Regularization Paths for All Generalized Linear Models}, + author = {J. Kenneth Tay and Balasubramanian Narasimhan and Trevor Hastie}, + journal = {Journal of Statistical Software}, + year = {2023}, + volume = {106}, + number = {1}, + pages = {1--31}, + doi = {10.18637/jss.v106.i01}, + } diff --git a/slides/slides.tex b/slides/slides.tex index 3ff6c75..1b0d8cb 100644 --- a/slides/slides.tex +++ b/slides/slides.tex @@ -170,6 +170,7 @@ \tikzset{ declare function={ + f2jacobian(\x)=\x^(-2);% discreteuniform(\a,\b)=1/(\b-\a+1);% binomial(\n,\p)=\n!/(x!*(\n-x)!)*\p^x*(1-\p)^(\n-x);% poisson(\l)=(\l^x)*exp(-\l)/(x!);% @@ -183,6 +184,10 @@ student(\n)=gamma((\n+1)/2.)/(sqrt(\n*pi)*gamma(\n/2.))*((1+(x*x)/\n)^(-(\n+1)/2.));% cauchy(\m,\s)=(1/(pi*\s*(1+((x-\m)/\s)^2)));% beta(\a,\b)=(x^(\a-1)*(1-x)^(\b-1)/((gamma(\a)*gamma(\b))/gamma(\a+\b));% + laplace(\b)=(1/2*\b)*exp(-abs(x)/\b);% + hs(\l,\t)=gaussian(0,sqrt(\l^2*\t^2));% + f2exponential(\l)=\l*exp(-\l*(1/x-1));% + shrinkagelaplace(\r)=f2exponential(\r)*f2jacobian(x);% binormal(\ma,\sa,\mb,\sb,\ro)=exp(-(((x-\ma)/\sa)^2+((y-\mb)/\sb)^2-(2*\ro)*((x-\ma)/\sa)*((y-\mb)/\sb))/(2*(1-\ro^2)))/(2*pi*\sa*\sb*(1-\ro^2)^0.5);% conditionalbinormal(\yc,\ma,\sa,\mb,\sb,\ro)=exp(-(((x-\ma)/\sa)^2+((\yc-\mb)/\sb)^2-(2*\ro)*((x-\ma)/\sa)*((\yc-\mb)/\sb))/(2*(1-\ro^2)))/(2*pi*\sa*\sb*(1-\ro^2)^0.5);% sumtwonormals(\ma,\sa,\wa,\mb,\sb,\wb)=(\wa*gaussian(\ma,\sa))+(\wb*gaussian(\mb,\sb));% @@ -201,9 +206,10 @@ \include{07-Ordinal_Regression} \include{08-Poisson_Regression} \include{09-Robust_Regression} -\include{10-Hierarchical_Models} -\include{11-MCMC} -\include{12-Model_Comparison} +\include{10-Sparse_Regression} +\include{11-Hierarchical_Models} +\include{12-MCMC} +\include{13-Model_Comparison} %--- Citations -------------------------------------------------------% diff --git 
a/slides/tikz/placeholder b/slides/tikz/.gitkeep similarity index 100% rename from slides/tikz/placeholder rename to slides/tikz/.gitkeep diff --git a/stan/11-sparse_horseshoe_regression.stan b/stan/11-sparse_horseshoe_regression.stan new file mode 100644 index 0000000..5d78ab6 --- /dev/null +++ b/stan/11-sparse_horseshoe_regression.stan @@ -0,0 +1,84 @@ +// run with 4 chains and 2k iters. +// all data should be scaled to mean 0 and std 1: +// sparse_regression data +data { + int N; // number of observations + int K; // number of independent variables + matrix[N, K] X; // data matrix + vector[N] y; // dependent variable vector +} +parameters { + real alpha; // intercept + vector[K] lambda; // local shrinkage factor + real sigma; // model error + real tau; // global shinkrage factor + vector[K] beta; // coefficients for independent variables +} +model { + // priors + alpha ~ student_t(3, 0, 2.5); + lambda ~ cauchy(0, 1); + sigma ~ exponential(1); + tau ~ cauchy(0, 1); + beta ~ normal(0, lambda * tau); + + // likelihood + y ~ normal(alpha + X * beta, sigma); +} +// results: +//Warmup took (0.53, 0.48, 0.53, 0.48) seconds, 2.0 seconds total +//Sampling took (0.40, 0.32, 0.21, 0.17) seconds, 1.1 seconds total +// +// Mean MCSE StdDev 5% 50% 95% N_Eff N_Eff/s R_hat +// +//lp__ -5.6e+01 1.8e-01 5.1 -6.5e+01 -5.5e+01 -47 802 722 1.0 +//accept_stat__ 0.78 0.063 0.32 2.3e-03 0.95 1.00 26 24 1.1e+00 +//stepsize__ 0.084 0.017 0.024 5.1e-02 0.10 0.11 2.0 1.8 7.4e+13 +//treedepth__ 5.0 0.40 1.2 2.0e+00 5.0 6.0 9.2 8.2 1.2e+00 +//n_leapfrog__ 44 11 23 4.0e+00 31 63 4.1 3.7 1.4e+00 +//divergent__ 0.14 0.033 0.35 0.0e+00 0.00 1.0 112 100 1.0e+00 +//energy__ 77 0.23 6.9 6.6e+01 77 89 888 799 1.0e+00 +// +//alpha 1.2e-01 4.6e-03 0.11 -6.8e-02 1.1e-01 0.29 565 509 1.0 +//lambda[1] 7.1e+00 2.7e-01 8.3 1.7e+00 4.5e+00 21 928 835 1.0 +//lambda[2] 7.8e-01 3.6e-02 1.5 4.2e-02 4.4e-01 2.6 1703 1533 1.0 +//lambda[3] 3.9e+00 1.4e-01 4.9 1.0e+00 2.7e+00 10 1217 1095 1.0 +//lambda[4] 8.2e-01 
4.7e-02 1.4 4.1e-02 4.4e-01 2.8 883 795 1.0 +//lambda[5] 4.6e+00 1.5e-01 5.5 1.1e+00 3.0e+00 13 1292 1163 1.0 +//lambda[6] 3.5e+00 1.7e-01 5.5 7.9e-01 2.2e+00 9.2 986 887 1.0 +//lambda[7] 8.8e-01 3.7e-02 1.8 4.1e-02 5.0e-01 2.6 2436 2192 1.0 +//lambda[8] 2.2e+00 7.1e-02 2.4 4.9e-01 1.6e+00 5.8 1135 1022 1.0 +//lambda[9] 7.2e-01 2.9e-02 1.2 4.5e-02 4.1e-01 2.3 1658 1493 1.0 +//lambda[10] 1.1e+00 1.0e-01 2.0 6.0e-02 5.3e-01 3.4 365 329 1.0 +//lambda[11] 1.9e+00 4.3e-01 7.3 1.7e-01 9.2e-01 4.0 287 259 1.0 +//lambda[12] 7.3e-01 3.6e-02 1.2 1.7e-02 3.9e-01 2.5 1107 997 1.0 +//lambda[13] 8.8e-01 4.2e-02 1.8 4.8e-02 4.9e-01 2.6 1708 1537 1.0 +//lambda[14] 6.3e+00 6.2e-01 19 1.4e+00 4.0e+00 15 966 869 1.0 +//lambda[15] 8.3e-01 3.0e-02 1.3 6.2e-02 5.0e-01 2.5 1834 1651 1.0 +//lambda[16] 7.3e-01 2.8e-02 1.2 4.4e-02 4.2e-01 2.2 1889 1700 1.0 +//lambda[17] 8.2e-01 8.5e-02 1.5 5.0e-02 4.2e-01 2.9 305 274 1.0 +//lambda[18] 6.9e-01 2.3e-02 0.98 4.9e-02 3.9e-01 2.3 1845 1661 1.0 +//lambda[19] 7.2e-01 3.7e-02 1.1 4.2e-02 4.3e-01 2.3 980 882 1.0 +//lambda[20] 6.0e+00 2.7e-01 10 1.4e+00 3.9e+00 17 1414 1273 1.0 +//sigma 9.6e-01 1.6e-03 0.072 8.5e-01 9.5e-01 1.1 2052 1847 1.0 +//tau 3.0e-01 4.8e-03 0.13 1.3e-01 2.8e-01 0.54 788 710 1.0 +//beta[1] 1.4e+00 2.7e-03 0.10 1.2e+00 1.4e+00 1.6 1470 1323 1.0 +//beta[2] 2.0e-02 1.7e-03 0.074 -9.1e-02 1.1e-02 0.15 1789 1610 1.0 +//beta[3] 7.5e-01 2.8e-03 0.10 5.9e-01 7.5e-01 0.92 1323 1191 1.0 +//beta[4] 3.5e-02 2.5e-03 0.078 -7.8e-02 2.2e-02 0.18 947 852 1.0 +//beta[5] -8.9e-01 2.2e-03 0.10 -1.1e+00 -8.8e-01 -0.72 2132 1919 1.00 +//beta[6] 5.9e-01 4.3e-03 0.099 4.2e-01 5.9e-01 0.75 536 482 1.0 +//beta[7] 6.6e-02 3.1e-03 0.086 -5.6e-02 5.1e-02 0.22 754 679 1.0 +//beta[8] 3.7e-01 3.9e-03 0.10 2.1e-01 3.7e-01 0.53 679 611 1.0 +//beta[9] -1.3e-02 2.1e-03 0.073 -1.5e-01 -6.4e-03 0.11 1264 1138 1.0 +//beta[10] 7.1e-02 9.3e-03 0.10 -6.2e-02 5.0e-02 0.26 119 107 1.0 +//beta[11] 1.8e-01 5.7e-03 0.11 -5.9e-03 1.9e-01 0.38 393 354 1.0 +//beta[12] 
-3.2e-02 2.1e-03 0.079 -1.8e-01 -1.7e-02 0.079 1430 1287 1.0 +//beta[13] -3.1e-02 4.0e-03 0.082 -1.7e-01 -2.1e-02 0.096 425 383 1.0 +//beta[14] -1.1e+00 4.2e-03 0.097 -1.3e+00 -1.1e+00 -0.98 523 471 1.0 +//beta[15] -6.2e-02 5.0e-03 0.098 -2.4e-01 -4.3e-02 0.069 379 341 1.0 +//beta[16] -2.0e-02 2.3e-03 0.074 -1.6e-01 -1.1e-02 0.087 1040 936 1.0 +//beta[17] -1.6e-02 2.1e-03 0.073 -1.5e-01 -8.0e-03 0.095 1208 1087 1.0 +//beta[18] 3.0e-02 3.3e-03 0.076 -8.4e-02 2.0e-02 0.17 525 473 1.0 +//beta[19] -5.3e-03 2.9e-03 0.064 -1.1e-01 -4.2e-03 0.11 491 442 1.0 +//beta[20] -1.1e+00 5.1e-03 0.12 -1.3e+00 -1.1e+00 -0.93 557 501 1.0 diff --git a/stan/12-sparse_horseshoe_p_regression.stan b/stan/12-sparse_horseshoe_p_regression.stan new file mode 100644 index 0000000..9f55155 --- /dev/null +++ b/stan/12-sparse_horseshoe_p_regression.stan @@ -0,0 +1,85 @@ +// run with 4 chains and 2k iters. +// all data should be scaled to mean 0 and std 1: +// sparse_regression data +data { + int N; // number of observations + int K; // number of independent variables + matrix[N, K] X; // data matrix + vector[N] y; // dependent variable vector +} +parameters { + real alpha; // intercept + vector[K] lambda; // local shrinkage factor + real eta; // additional local shrinkage factor + real sigma; // model error + real tau; // global shinkrage factor + vector[K] beta; // coefficients for independent variables +} +model { + // priors + alpha ~ student_t(3, 0, 2.5); + lambda ~ cauchy(0, 1); + sigma ~ exponential(1); + tau ~ cauchy(0, (1 / K) + machine_precision()); // numerical stability + eta ~ cauchy(0, 1); + beta ~ normal(0, lambda * tau * eta); + + // likelihood + y ~ normal(alpha + X * beta, sigma); +} +// results: +//Warmup took (2.2, 2.2, 2.1, 2.2) seconds, 8.7 seconds total +//Sampling took (2.2, 2.2, 2.4, 3.0) seconds, 9.8 seconds total +// Mean MCSE StdDev 5% 50% 95% N_Eff N_Eff/s R_hat +//lp__ -1.2e+02 1.6e-01 5.6e+00 -1.3e+02 -1.2e+02 -1.1e+02 1194 122 1.0 +//accept_stat__ 0.92 3.6e-03 
1.3e-01 6.7e-01 0.97 1.00 1366 139 1.0e+00 +//stepsize__ 0.010 7.2e-04 1.0e-03 8.3e-03 0.011 0.011 2.0 0.20 2.5e+13 +//treedepth__ 8.3 2.3e-01 6.2e-01 8.0e+00 8.0 9.0 7.1 0.73 1.2e+00 +//n_leapfrog__ 401 4.1e+01 1.4e+02 2.6e+02 511 511 12 1.3 1.1e+00 +//divergent__ 0.036 3.5e-03 1.9e-01 0.0e+00 0.00 0.00 2804 286 1.0e+00 +//energy__ 145 2.2e-01 7.3e+00 1.3e+02 145 158 1119 114 1.0e+00 +//alpha 1.1e-01 2.3e-03 1.1e-01 -6.9e-02 1.1e-01 2.9e-01 2278 232 1.0 +//lambda[1] 1.1e+01 4.0e-01 1.7e+01 2.3e+00 7.1e+00 3.4e+01 1774 181 1.00 +//lambda[2] 8.5e-01 3.1e-02 1.7e+00 3.9e-02 4.8e-01 2.6e+00 2985 304 1.00 +//lambda[3] 6.3e+00 2.1e-01 8.8e+00 1.3e+00 4.0e+00 1.7e+01 1688 172 1.0 +//lambda[4] 8.9e-01 3.6e-02 1.9e+00 5.2e-02 5.0e-01 2.6e+00 2601 265 1.0 +//lambda[5] 7.5e+00 3.4e-01 1.3e+01 1.5e+00 4.7e+00 2.0e+01 1345 137 1.0 +//lambda[6] 5.0e+00 1.8e-01 7.2e+00 1.1e+00 3.2e+00 1.4e+01 1680 171 1.0 +//lambda[7] 9.7e-01 3.6e-02 1.8e+00 5.6e-02 5.8e-01 2.9e+00 2408 246 1.0 +//lambda[8] 3.4e+00 1.7e-01 6.1e+00 6.7e-01 2.1e+00 9.5e+00 1324 135 1.0 +//lambda[9] 9.5e-01 8.0e-02 4.8e+00 4.3e-02 4.6e-01 2.8e+00 3623 370 1.0 +//lambda[10] 1.1e+00 5.3e-02 2.3e+00 4.3e-02 5.8e-01 3.5e+00 1988 203 1.0 +//lambda[11] 1.9e+00 5.6e-02 2.6e+00 2.2e-01 1.1e+00 5.5e+00 2167 221 1.0 +//lambda[12] 9.5e-01 4.5e-02 1.9e+00 4.8e-02 5.1e-01 3.0e+00 1760 180 1.0 +//lambda[13] 8.9e-01 2.6e-02 1.4e+00 5.5e-02 5.3e-01 2.7e+00 2697 275 1.0 +//lambda[14] 9.5e+00 4.1e-01 1.5e+01 2.0e+00 6.0e+00 2.6e+01 1266 129 1.0 +//lambda[15] 1.0e+00 4.2e-02 1.9e+00 4.8e-02 5.9e-01 3.1e+00 1918 196 1.0 +//lambda[16] 8.6e-01 3.3e-02 1.5e+00 3.5e-02 4.9e-01 2.8e+00 2012 205 1.0 +// +//lambda[17] 8.2e-01 3.0e-02 1.4e+00 3.5e-02 4.6e-01 2.6e+00 2278 232 1.0 +//lambda[18] 8.6e-01 2.7e-02 1.2e+00 4.7e-02 5.0e-01 2.8e+00 2082 212 1.0 +//lambda[19] 8.3e-01 3.1e-02 1.5e+00 4.8e-02 4.6e-01 2.5e+00 2250 230 1.0 +//lambda[20] 9.4e+00 6.0e-01 2.5e+01 1.9e+00 5.9e+00 2.5e+01 1694 173 1.0 +//eta 3.6e+13 3.9e+12 2.0e+14 7.0e+00 
6.6e+07 1.4e+14 2599 265 1.0 +//sigma 9.6e-01 1.5e-03 7.5e-02 8.5e-01 9.5e-01 1.1e+00 2496 255 1.0 +//tau 8.3e-03 1.3e-03 7.8e-02 1.2e-15 2.7e-09 2.7e-02 3855 393 1.00 +//beta[1] 1.4e+00 2.2e-03 1.0e-01 1.2e+00 1.4e+00 1.6e+00 2189 223 1.0 +//beta[2] 2.0e-02 1.5e-03 6.8e-02 -8.1e-02 8.3e-03 1.4e-01 2164 221 1.0 +//beta[3] 7.5e-01 2.1e-03 1.1e-01 5.7e-01 7.5e-01 9.3e-01 2662 272 1.00 +//beta[4] 2.6e-02 1.5e-03 7.2e-02 -7.7e-02 1.4e-02 1.6e-01 2218 226 1.0 +//beta[5] -8.9e-01 1.9e-03 1.1e-01 -1.1e+00 -8.9e-01 -7.1e-01 3191 326 1.0 +//beta[6] 5.8e-01 2.0e-03 1.0e-01 4.1e-01 5.8e-01 7.4e-01 2633 269 1.00 +//beta[7] 5.0e-02 1.9e-03 8.1e-02 -5.7e-02 3.3e-02 2.1e-01 1767 180 1.0 +//beta[8] 3.7e-01 1.9e-03 1.0e-01 2.0e-01 3.6e-01 5.3e-01 2848 291 1.0 +//beta[9] -7.3e-03 1.6e-03 7.2e-02 -1.4e-01 -2.2e-03 1.1e-01 2008 205 1.0 +//beta[10] 5.1e-02 2.0e-03 8.2e-02 -5.6e-02 3.2e-02 2.1e-01 1755 179 1.0 +//beta[11] 1.8e-01 2.7e-03 1.1e-01 -2.0e-03 1.8e-01 3.6e-01 1762 180 1.0 +//beta[12] -2.7e-02 1.7e-03 7.6e-02 -1.7e-01 -1.4e-02 8.6e-02 1874 191 1.0 +//beta[13] -2.9e-02 1.8e-03 7.7e-02 -1.8e-01 -1.6e-02 8.3e-02 1877 191 1.0 +//beta[14] -1.1e+00 1.8e-03 9.7e-02 -1.3e+00 -1.1e+00 -9.7e-01 2840 290 1.0 +//beta[15] -5.0e-02 2.3e-03 8.9e-02 -2.2e-01 -3.0e-02 7.3e-02 1537 157 1.0 +//beta[16] -1.7e-02 1.5e-03 7.2e-02 -1.5e-01 -7.4e-03 9.6e-02 2251 230 1.0 +//beta[17] -1.0e-02 1.4e-03 6.6e-02 -1.2e-01 -3.4e-03 9.4e-02 2331 238 1.0 +//beta[18] 2.1e-02 1.6e-03 7.0e-02 -8.4e-02 9.3e-03 1.5e-01 2032 207 1.0 +//beta[19] -9.5e-04 1.3e-03 6.1e-02 -1.0e-01 -5.3e-05 9.8e-02 2095 214 1.00 +//beta[20] -1.1e+00 2.3e-03 1.2e-01 -1.3e+00 -1.1e+00 -9.2e-01 2663 272 1.0 diff --git a/stan/13-sparse_finnish_horseshoe_regression.stan b/stan/13-sparse_finnish_horseshoe_regression.stan new file mode 100644 index 0000000..b26c2dc --- /dev/null +++ b/stan/13-sparse_finnish_horseshoe_regression.stan @@ -0,0 +1,131 @@ +// run with 4 chains and 2k iters. 
+// all data should be scaled to mean 0 and std 1: +// sparse_regression data +data { + int N; // number of observations + int K; // number of independent variables + matrix[N, K] X; // data matrix + vector[N] y; // dependent variable vector +} +parameters { + real alpha; // intercept + vector[K] lambda; // local shrinkage factor + real sigma; // model error + real tau; // global shinkrage factor + real c_aux; + vector[K] z; // standard normal for coefficients +} +transformed parameters { + real c = 2 * sqrt(c_aux); // slab_scale = 2 + vector[K] lambda_tilde = sqrt (c^2 * square(lambda) ./ (c^2 + tau^2* square(lambda))); + vector[K] beta = z .* lambda_tilde * tau; // coefficients +} +model { + // priors + alpha ~ student_t(3, 0, 2.5); + z ~ normal(0, 1); + lambda ~ student_t(1, 0, 2.5); // nu_local = 1 + sigma ~ exponential(1); + tau ~ student_t(1, 0, 3 * sigma); // tau_0 = 3 and nu_global = 1 + c_aux ~ inv_gamma(0.5 * 4 , 0.5 * 4 ); // slab_df = 4 + + // likelihood + y ~ normal(alpha + X * beta, sigma); +} +// results: +//Warmup took (0.72, 0.72, 0.69, 0.65) seconds, 2.8 seconds total +//Sampling took (0.73, 0.67, 0.77, 0.80) seconds, 3.0 seconds total +// Mean MCSE StdDev 5% 50% 95% N_Eff N_Eff/s R_hat +//lp__ -7.2e+01 1.7e-01 5.8 -8.3e+01 -7.2e+01 -63 1182 398 1.0 +//accept_stat__ 9.3e-01 1.3e-03 8.5e-02 0.77 0.96 1.00 4246 1429 1.0e+00 +//stepsize__ 4.8e-02 1.3e-03 1.8e-03 0.046 0.048 0.051 2.0 0.67 8.8e+12 +//treedepth__ 6.3e+00 6.1e-02 4.7e-01 6.0 6.0 7.0 58 20 1.0e+00 +//n_leapfrog__ 9.9e+01 3.2e+00 3.4e+01 63 127 127 111 37 1.0e+00 +//divergent__ 2.5e-04 nan 1.6e-02 0.00 0.00 0.00 nan nan nan +//energy__ 9.4e+01 2.2e-01 7.5e+00 82 94 107 1147 386 1.0e+00 +//alpha 1.2e-01 1.8e-03 0.11 -5.9e-02 1.1e-01 0.30 3554 1196 1.0 +//lambda[1] 7.9e+01 2.9e+01 1452 3.6e+00 1.3e+01 121 2588 871 1.0 +//lambda[2] 4.7e+00 2.6e+00 162 8.5e-02 9.6e-01 6.2 3996 1345 1.00 +//lambda[3] 4.1e+01 1.9e+01 794 2.1e+00 6.7e+00 49 1700 572 1.0 +//lambda[4] 3.1e+00 4.3e-01 23 8.7e-02 
1.1e+00 7.9 2819 949 1.0 +//lambda[5] 3.0e+01 4.3e+00 202 2.4e+00 7.8e+00 57 2226 749 1.0 +//lambda[6] 1.2e+01 8.5e-01 40 1.6e+00 5.2e+00 37 2160 727 1.0 +//lambda[7] 3.3e+00 4.3e-01 20 1.0e-01 1.2e+00 8.6 2165 728 1.00 +//lambda[8] 8.6e+00 7.6e-01 40 1.1e+00 3.6e+00 23 2722 916 1.00 +//lambda[9] 5.9e+00 2.3e+00 105 7.0e-02 9.2e-01 6.8 2097 706 1.0 +//lambda[10] 1.0e+01 5.3e+00 303 1.0e-01 1.2e+00 9.0 3254 1095 1.0 +//lambda[11] 5.5e+00 6.6e-01 27 4.2e-01 2.3e+00 14 1689 568 1.0 +//lambda[12] 2.9e+00 2.9e-01 17 7.6e-02 1.0e+00 7.8 3446 1160 1.0 +//lambda[13] 2.9e+00 4.3e-01 25 8.4e-02 1.0e+00 6.9 3304 1112 1.0 +//lambda[14] 4.0e+01 8.3e+00 302 2.9e+00 1.0e+01 83 1334 449 1.0 +//lambda[15] 3.6e+00 6.4e-01 31 9.6e-02 1.3e+00 8.2 2388 803 1.0 +//lambda[16] 2.7e+00 2.7e-01 15 8.0e-02 1.0e+00 6.7 2963 997 1.0 +//lambda[17] 3.9e+00 1.1e+00 66 7.4e-02 1.0e+00 7.2 3953 1330 1.00 +//lambda[18] 3.0e+00 3.7e-01 20 7.3e-02 9.9e-01 6.8 2827 951 1.00 +//lambda[19] 2.4e+00 2.3e-01 10 5.9e-02 8.8e-01 6.7 1944 654 1.00 +//lambda[20] 3.6e+01 5.2e+00 284 2.8e+00 9.9e+00 93 3021 1017 1.0 +//sigma 9.5e-01 1.3e-03 0.074 8.4e-01 9.5e-01 1.1 3428 1153 1.0 +//tau 1.6e-01 4.3e-03 0.12 5.7e-02 1.3e-01 0.32 817 275 1.0 +//c_aux 1.3e+00 3.1e-02 1.4 3.7e-01 9.2e-01 3.6 2208 743 1.00 +//z[1] 1.3e+00 1.1e-02 0.53 6.1e-01 1.2e+00 2.3 2370 798 1.00 +//z[2] 1.3e-01 1.3e-02 0.68 -1.0e+00 1.0e-01 1.3 2814 947 1.0 +//z[3] 1.1e+00 1.2e-02 0.56 4.1e-01 9.9e-01 2.2 2358 794 1.0 +//z[4] 2.2e-01 1.2e-02 0.65 -8.2e-01 1.7e-01 1.4 2934 987 1.0 +//z[5] -1.2e+00 1.1e-02 0.56 -2.2e+00 -1.0e+00 -0.46 2664 896 1.0 +//z[6] 1.0e+00 1.1e-02 0.56 3.4e-01 9.4e-01 2.1 2662 896 1.0 +//z[7] 3.6e-01 1.2e-02 0.63 -6.2e-01 2.8e-01 1.5 2792 940 1.00 +//z[8] 9.4e-01 1.1e-02 0.56 2.5e-01 8.3e-01 2.0 2391 805 1.0 +//z[9] -7.4e-02 1.3e-02 0.68 -1.2e+00 -5.8e-02 1.0 2899 975 1.0 +//z[10] 3.4e-01 1.3e-02 0.65 -7.2e-01 2.9e-01 1.4 2405 809 1.0 +//z[11] 7.3e-01 1.1e-02 0.55 7.6e-02 6.4e-01 1.8 2641 889 1.0 +//z[12] -1.9e-01 1.2e-02 
0.65 -1.3e+00 -1.6e-01 0.94 2911 979 1.00 +//z[13] -1.8e-01 1.3e-02 0.68 -1.3e+00 -1.4e-01 0.91 2574 866 1.0 +//z[14] -1.2e+00 1.0e-02 0.55 -2.3e+00 -1.1e+00 -0.54 2774 933 1.0 +//z[15] -3.4e-01 1.3e-02 0.67 -1.5e+00 -2.8e-01 0.76 2752 926 1.00 +//z[16] -1.4e-01 1.4e-02 0.69 -1.3e+00 -1.2e-01 1.0 2272 764 1.0 +//z[17] -1.1e-01 1.4e-02 0.69 -1.3e+00 -9.1e-02 1.1 2306 776 1.0 +//z[18] 1.4e-01 1.4e-02 0.69 -1.1e+00 1.3e-01 1.3 2427 817 1.0 +// +//z[19] -7.5e-03 1.2e-02 0.68 -1.1e+00 -1.1e-02 1.1 3048 1026 1.0 +//z[20] -1.2e+00 1.1e-02 0.56 -2.3e+00 -1.1e+00 -0.52 2431 818 1.0 +//c 2.1e+00 1.8e-02 0.88 1.2e+00 1.9e+00 3.8 2468 830 1.00 +//lambda_tilde[1] 1.1e+01 1.9e-01 8.2 3.0e+00 8.6e+00 26 1937 652 1.0 +//lambda_tilde[2] 1.6e+00 4.4e-02 2.3 8.5e-02 9.5e-01 5.3 2768 931 1.0 +//lambda_tilde[3] 7.4e+00 1.3e-01 5.9 1.9e+00 5.7e+00 19 1994 671 1.0 +//lambda_tilde[4] 1.9e+00 6.1e-02 3.0 8.7e-02 1.1e+00 6.2 2402 808 1.0 +//lambda_tilde[5] 8.2e+00 1.5e-01 6.7 2.1e+00 6.4e+00 20 2018 679 1.0 +//lambda_tilde[6] 6.3e+00 1.2e-01 5.5 1.6e+00 4.7e+00 17 2051 690 1.00 +//lambda_tilde[7] 2.0e+00 4.9e-02 2.5 1.0e-01 1.2e+00 6.4 2532 852 1.0 +//lambda_tilde[8] 4.7e+00 8.6e-02 4.2 1.1e+00 3.4e+00 12 2410 811 1.0 +//lambda_tilde[9] 1.6e+00 4.5e-02 2.3 7.0e-02 9.2e-01 5.5 2615 880 1.00 +//lambda_tilde[10] 2.0e+00 5.6e-02 2.7 1.0e-01 1.2e+00 6.6 2401 808 1.0 +//lambda_tilde[11] 3.2e+00 6.8e-02 3.3 4.2e-01 2.2e+00 9.1 2446 823 1.0 +//lambda_tilde[12] 1.8e+00 4.8e-02 2.4 7.6e-02 1.0e+00 6.2 2586 870 1.0 +//lambda_tilde[13] 1.7e+00 4.1e-02 2.2 8.4e-02 1.0e+00 5.7 2840 956 1.0 +//lambda_tilde[14] 9.4e+00 1.5e-01 6.8 2.6e+00 7.5e+00 23 2177 732 1.0 +//lambda_tilde[15] 2.1e+00 5.7e-02 2.9 9.6e-02 1.3e+00 6.5 2520 848 1.00 +//lambda_tilde[16] 1.7e+00 4.1e-02 2.2 8.0e-02 1.0e+00 5.5 2950 993 1.0 +//lambda_tilde[17] 1.8e+00 4.9e-02 2.5 7.4e-02 9.9e-01 5.9 2667 897 1.0 +//lambda_tilde[18] 1.7e+00 5.5e-02 2.6 7.3e-02 9.8e-01 5.6 2359 794 1.00 +//lambda_tilde[19] 1.6e+00 4.0e-02 2.2 5.9e-02 8.7e-01 
5.5 3056 1028 1.00 +//lambda_tilde[20] 9.8e+00 1.9e-01 8.3 2.4e+00 7.6e+00 25 1960 659 1.0 +//beta[1] 1.4e+00 1.6e-03 0.10 1.2e+00 1.4e+00 1.5 4069 1369 1.00 +//beta[2] 2.1e-02 1.1e-03 0.077 -1.0e-01 1.1e-02 0.16 5190 1746 1.00 +//beta[3] 7.5e-01 1.6e-03 0.10 5.8e-01 7.5e-01 0.92 4158 1399 1.0 +//beta[4] 3.7e-02 1.3e-03 0.081 -8.3e-02 2.3e-02 0.19 4112 1384 1.00 +//beta[5] -8.9e-01 1.6e-03 0.11 -1.1e+00 -8.9e-01 -0.71 4178 1406 1.0 +//beta[6] 5.9e-01 1.5e-03 0.097 4.2e-01 5.9e-01 0.75 3984 1341 1.00 +//beta[7] 6.7e-02 1.5e-03 0.088 -5.4e-02 5.3e-02 0.23 3609 1214 1.00 +//beta[8] 3.8e-01 1.7e-03 0.097 2.1e-01 3.8e-01 0.53 3405 1146 1.00 +//beta[9] -1.2e-02 1.1e-03 0.076 -1.4e-01 -5.8e-03 0.11 4920 1656 1.00 +//beta[10] 6.9e-02 1.6e-03 0.091 -5.4e-02 5.4e-02 0.24 3252 1094 1.0 +//beta[11] 2.0e-01 2.2e-03 0.11 1.5e-02 2.0e-01 0.40 2820 949 1.00 +//beta[12] -3.5e-02 1.3e-03 0.083 -1.9e-01 -2.0e-02 0.087 4181 1407 1.00 +//beta[13] -3.1e-02 1.2e-03 0.083 -1.8e-01 -1.8e-02 0.093 4465 1502 1.00 +//beta[14] -1.1e+00 1.6e-03 0.095 -1.3e+00 -1.1e+00 -0.98 3399 1144 1.00 +//beta[15] -7.1e-02 1.6e-03 0.10 -2.6e-01 -5.1e-02 0.068 3870 1302 1.00 +//beta[16] -2.4e-02 1.2e-03 0.081 -1.7e-01 -1.2e-02 0.10 4247 1429 1.00 +//beta[17] -1.9e-02 1.2e-03 0.076 -1.5e-01 -1.1e-02 0.10 3888 1308 1.0 +//beta[18] 2.7e-02 1.3e-03 0.079 -9.5e-02 1.6e-02 0.18 3866 1301 1.00 +//beta[19] -1.8e-03 9.2e-04 0.068 -1.2e-01 -8.5e-04 0.12 5414 1822 1.00 +//beta[20] -1.1e+00 1.9e-03 0.12 -1.3e+00 -1.1e+00 -0.92 4169 1403 1.0 diff --git a/stan/14-sparse_r2d2_regression.stan b/stan/14-sparse_r2d2_regression.stan new file mode 100644 index 0000000..64e5e27 --- /dev/null +++ b/stan/14-sparse_r2d2_regression.stan @@ -0,0 +1,105 @@ +// run with 4 chains and 2k iters. 
+// all data should be scaled to mean 0 and std 1: +// sparse_regression data +data { + int N; // number of observations + int K; // number of independent variables + matrix[N, K] X; // data matrix + vector[N] y; // dependent variable vector +} +parameters { + real alpha; // intercept + vector[K] z; // standard normal for coefficients + real sigma; // model error + real r2; // coefficient of determination + simplex[K] phi; // local shrinkage parameter +} +transformed parameters { + real tau_2 = sigma^2 * r2 / (1 - r2); // global shrinkage parameter + vector[K] beta = z .* sqrt(phi .* tau_2); // coefficients +} +model { + // priors + alpha ~ student_t(3, 0, 2.5); + sigma ~ exponential(1); + r2 ~ beta(0.5 * 2, (1 - 0.5) * 2); // mean_r2 = 0.5 and prec_r2 = 2 + phi ~ dirichlet(rep_vector(1, K)); // cons_d2 = 1 + + // likelihood + y ~ normal(alpha + X * beta, sigma); +} +// results: +//Warmup took (6.4, 6.5, 6.4, 6.4) seconds, 26 seconds total +//Sampling took (7.8, 7.9, 7.8, 7.9) seconds, 31 seconds total +// Mean MCSE StdDev 5% 50% 95% N_Eff N_Eff/s R_hat +//lp__ -1.7e+02 1.0e+00 4.7e+00 -1.8e+02 -1.7e+02 -1.6e+02 20 0.63 1.1 +//accept_stat__ 0.96 3.9e-03 6.4e-02 0.83 0.98 1.00 267 8.5 1.0e+00 +//stepsize__ 0.038 2.4e-03 3.4e-03 0.033 0.038 0.042 2.0 0.064 2.0e+13 +//treedepth__ 10.0 nan 2.7e-02 10 10 10 nan nan nan +//n_leapfrog__ 1023 nan 5.0e+00 1023 1023 1023 nan nan nan +//divergent__ 0.00 nan 0.0e+00 0.00 0.00 0.00 nan nan nan +//energy__ 191 9.1e-01 6.4e+00 181 190 202 50 1.6 1.0e+00 +//alpha 8.9e-02 1.4e-02 1.1e-01 -8.7e-02 9.1e-02 2.6e-01 59 1.9 1.0 +//z[1] 2.4e+12 6.0e+11 1.6e+12 7.4e+11 1.9e+12 6.1e+12 7.1 0.23 1.9 +//z[2] -2.9e+09 5.1e+10 1.2e+11 -1.9e+11 2.1e+09 2.0e+11 5.2 0.17 2.1 +//z[3] 1.2e+12 2.7e+11 6.2e+11 3.6e+11 1.1e+12 2.3e+12 5.2 0.17 1.7 +//z[4] 2.8e+11 1.2e+11 2.8e+11 -6.2e+10 2.0e+11 8.8e+11 5.2 0.16 2.1 +//z[5] -1.8e+12 5.2e+11 1.2e+12 -4.4e+12 -1.5e+12 -5.0e+11 5.2 0.16 1.9 +//z[6] 1.2e+12 3.7e+11 9.2e+11 3.5e+11 8.6e+11 2.8e+12 6.1 0.19 
1.6 +//z[7] 1.3e+08 2.9e+10 1.1e+11 -1.8e+11 1.2e+10 1.9e+11 16 0.51 1.3 +//z[8] 5.8e+11 1.1e+11 2.9e+11 1.9e+11 5.4e+11 1.2e+12 6.9 0.22 1.6 +//z[9] 2.0e+10 6.1e+10 1.4e+11 -1.7e+11 -3.7e+09 3.1e+11 4.9 0.15 1.4 +//z[10] 1.6e+11 1.1e+11 2.6e+11 -2.0e+11 7.4e+10 7.4e+11 5.3 0.17 1.4 +//z[11] 2.5e+11 6.4e+10 1.3e+11 5.5e+10 2.5e+11 4.7e+11 4.2 0.13 1.7 +//z[12] -9.8e+09 7.9e+10 1.6e+11 -2.8e+11 -1.5e+10 2.6e+11 4.1 0.13 1.6 +//z[13] -1.2e+11 1.0e+11 1.8e+11 -4.7e+11 -5.8e+10 9.6e+10 3.2 0.10 1.9 +//z[14] -2.0e+12 6.4e+11 1.3e+12 -4.6e+12 -1.5e+12 -6.1e+11 4.3 0.14 1.9 +//z[15] -2.0e+11 1.7e+11 2.7e+11 -7.3e+11 -9.6e+10 4.7e+10 2.4 0.078 2.9 +//z[16] 1.7e+11 7.6e+10 1.8e+11 -4.0e+10 1.2e+11 4.6e+11 5.4 0.17 2.0 +//z[17] -3.9e+10 2.5e+10 8.6e+10 -1.7e+11 -3.6e+10 8.4e+10 12 0.39 1.4 +//z[18] 3.0e+09 6.9e+10 1.6e+11 -2.5e+11 -8.9e+09 2.7e+11 5.3 0.17 1.5 +//z[19] -4.8e+10 4.3e+10 1.7e+11 -3.4e+11 -7.0e+09 1.9e+11 16 0.50 1.3 +//z[20] -1.8e+12 2.8e+11 8.0e+11 -3.2e+12 -1.7e+12 -5.6e+11 8.0 0.26 1.4 +//sigma 9.5e-01 2.9e-03 7.2e-02 8.4e-01 9.4e-01 1.1e+00 635 20 1.0 +//r2 1.8e-23 8.4e-24 2.5e-23 2.2e-24 9.7e-24 6.5e-23 8.7 0.28 1.3 +//phi[1] 6.3e-02 8.8e-03 4.4e-02 1.6e-02 5.0e-02 1.5e-01 25 0.81 1.2 +//phi[2] 4.5e-02 6.3e-03 4.7e-02 2.1e-03 2.9e-02 1.4e-01 56 1.8 1.0 +//phi[3] 6.9e-02 7.9e-03 4.8e-02 2.0e-02 5.6e-02 1.7e-01 37 1.2 1.0 +//phi[4] 3.3e-02 6.1e-03 3.6e-02 1.2e-03 2.1e-02 1.1e-01 34 1.1 1.1 +//phi[5] 5.5e-02 9.9e-03 4.4e-02 3.4e-03 4.4e-02 1.4e-01 19 0.62 1.1 +//phi[6] 5.3e-02 6.2e-03 4.2e-02 1.1e-02 4.1e-02 1.4e-01 45 1.4 1.1 +//phi[7] 4.3e-02 4.1e-03 4.7e-02 1.5e-03 2.7e-02 1.4e-01 131 4.2 1.0 +//phi[8] 7.0e-02 1.8e-02 5.2e-02 1.1e-02 5.8e-02 1.7e-01 8.0 0.26 1.2 +//phi[9] 4.3e-02 1.9e-03 4.3e-02 1.8e-03 2.9e-02 1.3e-01 531 17 1.0 +//phi[10] 4.4e-02 7.1e-03 4.3e-02 2.4e-03 3.0e-02 1.3e-01 37 1.2 1.0 +//phi[11] 6.7e-02 7.1e-03 5.2e-02 8.3e-03 5.5e-02 1.7e-01 54 1.7 1.0 +//phi[12] 4.2e-02 3.2e-03 4.5e-02 1.7e-03 2.7e-02 1.4e-01 192 6.1 1.0 +//phi[13] 4.1e-02 
3.7e-03 4.1e-02 2.1e-03 2.7e-02 1.2e-01 119 3.8 1.0 +//phi[14] 6.9e-02 1.8e-02 5.3e-02 1.3e-02 5.3e-02 1.8e-01 9.0 0.29 1.3 +//phi[15] 4.4e-02 7.2e-03 4.4e-02 2.6e-03 3.1e-02 1.3e-01 37 1.2 1.0 +//phi[16] 3.4e-02 7.1e-03 4.0e-02 1.1e-03 2.0e-02 1.2e-01 31 0.99 1.1 +//phi[17] 4.4e-02 1.2e-03 4.3e-02 2.2e-03 3.0e-02 1.3e-01 1400 45 1.0 +//phi[18] 3.9e-02 2.5e-03 4.2e-02 1.6e-03 2.5e-02 1.2e-01 282 9.0 1.0 +//phi[19] 3.6e-02 2.3e-03 3.9e-02 1.4e-03 2.4e-02 1.1e-01 290 9.2 1.0 +//phi[20] 6.8e-02 7.8e-03 4.5e-02 1.7e-02 5.7e-02 1.5e-01 33 1.1 1.1 +//tau_2 1.6e-23 7.2e-24 2.1e-23 2.0e-24 8.8e-24 5.6e-23 8.2 0.26 1.4 +//beta[1] 1.4e+00 5.2e-03 1.0e-01 1.2e+00 1.4e+00 1.6e+00 363 12 1.0 +//beta[2] 1.7e-03 2.8e-02 6.0e-02 -1.0e-01 1.2e-03 1.0e-01 4.7 0.15 1.7 +//beta[3] 7.6e-01 7.7e-03 1.0e-01 5.9e-01 7.6e-01 9.3e-01 180 5.7 1.0 +//beta[4] 8.3e-02 1.6e-02 7.8e-02 -3.6e-02 8.0e-02 2.2e-01 25 0.79 1.1 +//beta[5] -8.9e-01 4.8e-03 1.1e-01 -1.1e+00 -8.9e-01 -7.1e-01 522 17 1.0 +//beta[6] 5.8e-01 5.2e-03 9.1e-02 4.3e-01 5.8e-01 7.2e-01 311 9.9 1.0 +//beta[7] 9.0e-03 1.3e-02 6.3e-02 -8.8e-02 4.4e-03 1.2e-01 22 0.70 1.2 +//beta[8] 3.7e-01 2.0e-02 9.7e-02 2.1e-01 3.7e-01 5.3e-01 23 0.73 1.1 +//beta[9] 3.5e-03 2.1e-02 6.0e-02 -8.9e-02 -1.8e-03 1.1e-01 7.8 0.25 1.3 +//beta[10] 6.8e-02 3.0e-02 8.3e-02 -4.4e-02 5.6e-02 2.2e-01 7.7 0.24 1.2 +//beta[11] 1.6e-01 1.6e-02 8.4e-02 4.0e-02 1.6e-01 3.2e-01 28 0.88 1.1 +//beta[12] -1.2e-02 2.5e-02 7.1e-02 -1.4e-01 -7.8e-03 1.0e-01 8.2 0.26 1.3 +//beta[13] -4.3e-02 2.8e-02 6.7e-02 -1.6e-01 -3.4e-02 5.1e-02 5.6 0.18 1.3 +//beta[14] -1.1e+00 8.8e-03 9.5e-02 -1.3e+00 -1.1e+00 -9.9e-01 117 3.7 1.0 +//beta[15] -7.8e-02 3.8e-02 8.4e-02 -2.4e-01 -5.8e-02 2.4e-02 4.9 0.16 1.4 +//beta[16] 4.6e-02 1.9e-02 6.0e-02 -4.3e-02 3.9e-02 1.5e-01 10 0.33 1.2 +//beta[17] -2.1e-02 1.2e-02 5.1e-02 -1.1e-01 -1.6e-02 5.2e-02 20 0.62 1.3 +//beta[18] 7.3e-03 3.0e-02 7.6e-02 -1.1e-01 -3.2e-03 1.4e-01 6.5 0.21 1.4 +//beta[19] -5.6e-03 1.5e-02 6.6e-02 -1.2e-01 -2.6e-03 
9.9e-02 21 0.65 1.2 +//beta[20] -1.1e+00 8.5e-03 1.2e-01 -1.3e+00 -1.1e+00 -9.5e-01 187 5.9 1.0 diff --git a/stan/11-hierarchical_varying_intercept-non_centered.stan b/stan/15-hierarchical_varying_intercept-non_centered.stan similarity index 100% rename from stan/11-hierarchical_varying_intercept-non_centered.stan rename to stan/15-hierarchical_varying_intercept-non_centered.stan diff --git a/stan/11-hierarchical_varying_intercept.stan b/stan/15-hierarchical_varying_intercept.stan similarity index 100% rename from stan/11-hierarchical_varying_intercept.stan rename to stan/15-hierarchical_varying_intercept.stan diff --git a/stan/12-hierarchical_varying_intercept_slope.stan b/stan/16-hierarchical_varying_intercept_slope.stan similarity index 100% rename from stan/12-hierarchical_varying_intercept_slope.stan rename to stan/16-hierarchical_varying_intercept_slope.stan diff --git a/stan/13-model_comparison-negative_binomial.stan b/stan/17-model_comparison-negative_binomial.stan similarity index 100% rename from stan/13-model_comparison-negative_binomial.stan rename to stan/17-model_comparison-negative_binomial.stan diff --git a/stan/13-model_comparison-poisson.stan b/stan/17-model_comparison-poisson.stan similarity index 100% rename from stan/13-model_comparison-poisson.stan rename to stan/17-model_comparison-poisson.stan diff --git a/stan/13-model_comparison-zero_inflated-negative_binomial.stan b/stan/17-model_comparison-zero_inflated-negative_binomial.stan similarity index 100% rename from stan/13-model_comparison-zero_inflated-negative_binomial.stan rename to stan/17-model_comparison-zero_inflated-negative_binomial.stan diff --git a/stan/13-model_comparison-zero_inflated-poisson.stan b/stan/17-model_comparison-zero_inflated-poisson.stan similarity index 100% rename from stan/13-model_comparison-zero_inflated-poisson.stan rename to stan/17-model_comparison-zero_inflated-poisson.stan diff --git a/stan/sparse_regression.data.json b/stan/sparse_regression.data.json new 
file mode 100644 index 0000000..b2e7c67 --- /dev/null +++ b/stan/sparse_regression.data.json @@ -0,0 +1,2308 @@ +{ + "N": 100, + "K": 20, + "y": [ + -1.2749, + 1.8434, + 0.4592, + 0.564, + 1.873, + 0.5275, + 2.4347, + -0.8946, + -0.2059, + 3.1101, + -4.1799, + 1.7001, + 2.7134, + 3.042, + 0.5223, + 3.5831, + -4.1732, + -1.6843, + 5.3033, + -1.264, + 1.7352, + -3.4808, + 6.2659, + -2.5668, + -0.912, + -0.751, + -3.6688, + -4.0008, + 1.4402, + -2.062, + -2.8931, + 3.1257, + 5.5322, + -1.7408, + 5.4528, + 1.8475, + -1.2756, + -1.0673, + -1.7376, + 2.8906, + 5.015, + 4.4429, + -1.6804, + 2.4295, + 3.3861, + 4.7059, + -2.8131, + 5.2427, + -2.7951, + 2.9433, + 4.0297, + 4.7882, + -0.1049, + -3.3738, + 0.833, + 3.1628, + 2.7692, + 3.2497, + 5.3844, + -3.0158, + 0.9627, + 1.7121, + -1.7406, + 4.2161, + -0.4489, + 3.9349, + 1.2746, + 0.2795, + -0.0812, + 1.4699, + 0.3513, + 5.8964, + 1.3729, + -2.1562, + -0.6776, + 1.9378, + 1.1399, + -1.9851, + 2.5081, + 6.1177, + -1.3609, + 1.4945, + 0.2201, + 3.3445, + -1.5187, + 0.7064, + 1.893, + -1.192, + 2.1107, + 2.4808, + -4.7444, + -2.1233, + -1.3899, + -3.9647, + 1.2049, + -6.8575, + -3.0141, + -3.2133, + 4.5017, + -3.3189 + ], + "X": [ + [ + 0.2739, + -0.0367, + 0.8547, + 0.9675, + 1.4155, + 0.5234, + 0.5627, + 1.1112, + 1.6408, + 0.6187, + 0.9999, + -0.0784, + -0.6033, + 0.0332, + -0.7009, + 1.1578, + 1.4578, + 0.7749, + -1.2685, + 1.9936 + ], + [ + 2.2448, + -0.546, + 0.2341, + -1.335, + 1.3131, + 0.5213, + -0.61, + -0.8614, + -0.2705, + 0.2301, + -0.1057, + 0.1631, + 0.7621, + 0.6781, + -0.5283, + -0.8792, + -0.4729, + -1.1172, + -0.7377, + -1.0788 + ], + [ + -0.1254, + -0.6069, + -0.8539, + -0.1488, + -0.6647, + 0.6066, + 0.1617, + -0.8627, + 0.6042, + 1.194, + 0.5013, + -0.9452, + 0.3989, + -0.7648, + 1.2854, + 0.6449, + 0.1792, + 0.0447, + 1.1053, + 0.3041 + ], + [ + -0.5436, + 1.1084, + -0.1042, + 1.0165, + 0.6999, + 1.655, + 0.49, + 0.0234, + 0.256, + -0.1273, + -0.0626, + 0.642, + 0.0755, + -1.3785, + -1.0247, + 
-2.1184, + -0.4695, + 0.6978, + 0.8656, + -0.7895 + ], + [ + -1.4594, + -0.2745, + 0.1119, + -0.8518, + 0.3153, + 1.0507, + 1.3864, + 0.2845, + 1.1405, + 2.6813, + -1.0147, + 0.367, + 1.7376, + -1.2661, + 1.452, + -0.7895, + -0.9844, + -1.637, + -0.5829, + -1.5289 + ], + [ + 1.0632, + -0.7535, + -1.3826, + 1.0762, + 0.37, + 1.4987, + -0.3605, + -0.2142, + 1.8266, + 0.8711, + -0.0637, + 1.0051, + 0.3152, + 0.5367, + -1.2624, + -1.5849, + -0.6315, + -1.8787, + 0.4504, + 1.4423 + ], + [ + 0.1158, + -0.9663, + 0.2742, + 0.0185, + -0.2104, + 0.5441, + -2.5055, + 2.1963, + 0.5925, + -1.0876, + -1.4646, + 0.5727, + -0.6211, + -0.6449, + -1.3505, + 0.3834, + 1.263, + 1.5608, + -0.9634, + -0.2451 + ], + [ + 0.3897, + 0.3998, + -0.4667, + 0.4776, + 0.8483, + 0.2574, + -0.0995, + 0.999, + -0.1925, + -0.4042, + 2.2074, + -1.0588, + 0.0957, + 0.6612, + -0.1247, + -0.4362, + 1.5593, + 0.7051, + -0.8578, + -0.7328 + ], + [ + 0.1932, + 0.1972, + -1.2431, + 1.9094, + 0.1149, + 0.4516, + -0.2671, + -0.2735, + -1.6111, + -0.3256, + -0.6787, + -0.4791, + 0.4078, + 0.2764, + 0.0341, + 0.6862, + 0.5295, + 0.877, + -0.5082, + -0.8352 + ], + [ + -0.0964, + 1.1772, + 2.5515, + 0.9557, + 0.0256, + -0.0456, + -0.6595, + 0.0392, + 0.1717, + 0.6434, + -1.2376, + 2.1802, + -0.526, + -0.3187, + -0.7252, + -1.1939, + 0.1684, + 0.486, + 2.5947, + -0.8472 + ], + [ + -0.942, + -0.6529, + -0.9382, + 0.4069, + -0.8297, + -0.0617, + 1.366, + 2.6536, + 1.0021, + 1.0413, + -0.3874, + -0.4114, + 1.4644, + 2.1752, + -0.4849, + 1.9289, + 0.0015, + -1.4663, + -1.1153, + 0.587 + ], + [ + -0.0763, + 1.0724, + 1.4585, + -0.6393, + 0.8136, + -0.7445, + 1.1765, + -0.0551, + 0.1899, + 0.0652, + 0.0706, + -0.2273, + -0.1358, + -0.3673, + -1.9611, + -0.4843, + 0.2974, + -0.3503, + -0.3324, + -0.173 + ], + [ + 0.8096, + -0.0184, + -0.6544, + -1.4904, + -0.8282, + -1.1228, + 0.1466, + -0.3991, + -1.4015, + -1.1258, + -0.4077, + -0.97, + -1.9905, + -2.4622, + 0.4592, + -0.3095, + -1.205, + -0.4188, + -3.0644, + -0.1496 
+ ], + [ + 0.7473, + -0.5249, + 1.4236, + -1.3183, + 0.2147, + 0.9126, + -0.4124, + -0.7198, + -0.8023, + -0.9405, + -0.5945, + 0.7613, + -0.0196, + 0.3967, + -0.5995, + 0.2373, + 0.8896, + 0.0117, + -0.0442, + 0.071 + ], + [ + -1.4167, + -0.7631, + 0.869, + -0.7789, + -1.2765, + -0.4122, + 0.3932, + 1.0312, + 2.0219, + 0.3158, + -1.3054, + 1.3843, + 0.0287, + -0.6405, + -0.7458, + -1.3119, + 1.8526, + 0.2668, + -0.5192, + -0.2853 + ], + [ + 0.9168, + 1.4558, + 0.617, + 0.9248, + -0.8268, + 1.5468, + 1.0458, + -1.7405, + -0.9719, + -0.114, + 0.7267, + -0.9197, + 0.3366, + 0.7456, + 0.2696, + -0.6488, + 1.679, + -1.4176, + -2.2351, + -0.602 + ], + [ + -0.7541, + -0.8248, + -0.6534, + -0.2967, + 1.1826, + -0.1351, + 1.0325, + 0.4076, + -0.1557, + -2.2789, + -0.85, + -0.0132, + -0.5052, + 1.7764, + -1.0154, + 0.0146, + -0.6478, + 0.8903, + 0.6218, + -0.0144 + ], + [ + 1.009, + -0.105, + -0.5756, + -0.7471, + -0.3596, + 0.149, + 0.8801, + -2.194, + 0.7447, + -0.3707, + -1.3628, + 1.6347, + 0.7988, + -0.4434, + -0.5408, + -0.3631, + 2.4228, + -2.3836, + -0.0282, + 1.2054 + ], + [ + 1.8617, + -2.0758, + -0.1991, + 0.6703, + -1.0348, + -1.3355, + 0.8222, + -0.5386, + -0.1964, + 1.2538, + 0.3078, + -0.617, + 0.2222, + -1.0956, + -0.3922, + -0.3275, + 0.22, + 0.7101, + 1.2972, + -1.4871 + ], + [ + -0.696, + 0.8456, + -1.0025, + 0.5291, + -0.7935, + 0.1827, + -0.5873, + -0.3091, + -0.4098, + -0.4401, + -0.3518, + -1.6722, + 0.6798, + 0.2689, + 1.6811, + 0.0553, + 2.5632, + 0.2928, + 1.2067, + -0.9489 + ], + [ + 0.5055, + -1.7915, + -0.1371, + -0.246, + -1.8472, + -0.2889, + -0.9762, + -1.0958, + -1.427, + -0.9556, + 0.2267, + -0.907, + 0.3716, + -0.2126, + -1.2652, + 1.2632, + -0.3601, + -0.1146, + 0.3511, + 0.6536 + ], + [ + -1.1749, + 0.5357, + 0.5748, + 0.5684, + 0.6638, + -0.06, + 1.2906, + 1.5559, + -0.5767, + -0.1181, + -0.2518, + 1.0265, + -0.1809, + 0.0853, + -0.7605, + -0.1452, + 0.0608, + 0.3027, + 0.0586, + 1.1517 + ], + [ + 0.2469, + -0.5814, + 1.657, + 1.3208, + 
-0.6695, + 0.9212, + -0.3403, + -0.0191, + -0.191, + -1.3741, + -0.2413, + 0.4619, + -0.3865, + -1.0833, + -0.4613, + 0.8314, + 1.296, + -0.1834, + 1.306, + -0.7559 + ], + [ + -1.8961, + 0.488, + 1.5803, + -0.9202, + 0.7932, + -1.3032, + 0.6191, + 0.9757, + -0.5679, + 0.164, + -0.0397, + 1.6935, + -2.3104, + 0.6332, + -1.8679, + 1.0177, + 0.4093, + -2.4261, + -2.1604, + -0.1382 + ], + [ + 0.0599, + -0.2887, + 0.4027, + 0.2679, + 1.3577, + -1.8516, + 0.4943, + 1.9372, + -1.3563, + 0.5713, + -1.702, + 1.0699, + 0.1546, + 0.2084, + -0.1699, + -0.4453, + -1.2307, + 0.0363, + -0.3472, + -0.656 + ], + [ + -1.4998, + 0.948, + -0.6347, + -1.2107, + 0.5042, + -1.0996, + -0.0616, + -0.2207, + -1.1701, + -1.6871, + 0.9798, + -1.3666, + -0.923, + -0.7959, + 1.0628, + 1.5415, + 0.0959, + -0.3445, + -1.1023, + -0.8032 + ], + [ + -0.3528, + -0.2366, + -1.3871, + 0.1102, + -1.3939, + -1.5129, + -0.1625, + -2.1646, + -1.5377, + -1.1955, + 1.1046, + 1.2738, + -0.416, + 1.3533, + -1.1607, + -0.6612, + 1.9529, + -0.7799, + 1.1878, + 0.3149 + ], + [ + -1.8185, + -0.7613, + -0.7484, + 0.056, + -0.0563, + 0.1426, + -1.308, + -0.2483, + 0.3312, + 1.8308, + -2.2393, + 0.4189, + 0.338, + 0.9658, + -0.0027, + -1.2426, + -0.0279, + -1.0265, + -0.05, + -0.6829 + ], + [ + 3.1619, + -1.2379, + -0.7024, + 0.078, + 1.6246, + 0.0907, + -0.2275, + 0.628, + -1.1891, + -0.1774, + -0.3986, + 0.3924, + 0.1146, + 1.4511, + 1.4386, + -0.1571, + -0.4371, + 0.9987, + 1.2553, + -1.1836 + ], + [ + -0.4557, + -0.9544, + -2.5796, + -0.5447, + 0.5245, + -0.8947, + -0.1188, + -0.7529, + 0.4616, + 0.2079, + -0.5712, + -1.9776, + 0.2162, + 0.3627, + -0.6351, + -0.5888, + -1.8852, + 0.4579, + 0.3662, + -1.0056 + ], + [ + -1.834, + 0.8817, + -0.8256, + 2.2904, + -0.5038, + -1.0731, + -0.1392, + 0.6915, + -0.7085, + -0.1301, + -0.3117, + 0.7622, + -0.7351, + 0.0631, + -1.0513, + -0.0833, + 0.5877, + -0.9319, + 0.2409, + 0.5053 + ], + [ + 0.8495, + 0.4444, + -0.4827, + 0.4883, + -0.3995, + 0.9185, + -0.8604, + 1.047, + 
-0.233, + -0.8436, + -1.4485, + -0.3134, + 0.3751, + -1.0248, + -1.3578, + 0.9253, + 0.3483, + 0.1809, + 0.5912, + -0.9049 + ], + [ + 1.2565, + 1.1384, + 0.2768, + -0.4465, + 0.6302, + -0.3111, + 1.0922, + -1.0652, + -1.1641, + -0.0109, + -0.4173, + 1.4876, + -0.7059, + -1.722, + 0.2717, + 0.0239, + 0.6765, + 0.3376, + 0.9611, + -1.345 + ], + [ + 0.5332, + -1.1359, + -1.631, + 0.1164, + -0.1761, + -2.0188, + -0.7168, + -0.9484, + -2.1052, + 0.2851, + 0.6889, + -1.0499, + -1.6419, + -0.8496, + 1.3441, + 1.1213, + -1.4303, + 1.0272, + 1.2417, + 0.5654 + ], + [ + 1.2741, + -1.2816, + 0.6041, + -1.0553, + 0.9136, + 2.2753, + 0.4404, + -1.4317, + -0.1662, + 0.1868, + -0.0906, + -0.3384, + 0.3841, + -0.6228, + -0.4819, + -0.5575, + -0.3961, + -2.3142, + 0.933, + -0.5643 + ], + [ + -0.9783, + -0.9242, + 0.0391, + 0.9953, + -0.8548, + 2.0792, + 2.0806, + 0.111, + -0.7344, + 0.4463, + -0.9756, + 0.1581, + -1.1742, + -0.6139, + -0.3884, + -0.776, + -0.0104, + -0.3018, + 0.0214, + 0.2551 + ], + [ + 1.7283, + -2.8789, + 1.014, + -0.2191, + 1.0336, + -1.2346, + 2.1857, + -1.2131, + 2.4149, + 0.9865, + 0.6671, + 0.8336, + -0.6549, + 0.9861, + 0.6966, + 1.6702, + -0.2631, + -0.7709, + 2.1901, + 0.0544 + ], + [ + -0.026, + 0.7211, + -1.6204, + 3.1129, + 0.3854, + -0.108, + 0.0867, + 0.3262, + 0.8285, + 0.9539, + 0.465, + 0.3912, + -0.1569, + -0.2072, + 1.4029, + 1.143, + -0.5356, + -0.1537, + -0.2597, + 0.2985 + ], + [ + -0.2932, + 1.1114, + 0.9554, + 2.4731, + 0.2844, + -0.5852, + -0.6563, + -1.5908, + 2.1627, + -0.0178, + 1.3029, + -0.1306, + -1.1486, + 1.7572, + -1.7608, + -0.7553, + -0.1951, + 0.2307, + 0.8012, + 0.1311 + ], + [ + 0.3219, + 0.493, + 0.4336, + -0.9166, + -0.7377, + -0.6422, + -0.7032, + 2.175, + -0.4131, + 0.6345, + 1.9194, + -1.3812, + 0.3792, + -0.1227, + -0.4557, + 0.6576, + 0.2697, + -0.4173, + 0.2863, + 0.1064 + ], + [ + 0.5896, + 1.9105, + 0.6389, + -0.0086, + -0.939, + -0.1986, + -1.6062, + -1.0279, + -0.0664, + 0.9184, + 0.9772, + -1.4321, + -0.131, + 
-1.1393, + 0.5561, + 1.2368, + -1.6942, + 0.4541, + 0.2363, + -0.3074 + ], + [ + 1.2553, + 0.3427, + 0.8086, + -1.6661, + 1.7529, + 1.023, + -0.1816, + 0.2055, + 1.0421, + 1.444, + -1.6754, + -1.3627, + -0.2766, + -0.5002, + 0.9091, + 0.9691, + -1.0485, + -1.4183, + 0.432, + -1.2734 + ], + [ + -0.9888, + 0.004, + 1.4654, + -0.0091, + -1.0239, + -1.6815, + 1.9792, + -0.132, + 0.8133, + -0.8675, + -1.177, + 0.0519, + 0.5364, + 1.3463, + -0.2466, + -1.2147, + -0.8797, + -0.4156, + -0.3952, + -0.2546 + ], + [ + -0.3117, + 1.7072, + -0.0755, + -0.6607, + -0.8861, + -1.0431, + 0.4202, + 0.4078, + 0.1515, + 0.5624, + -0.1913, + -0.1157, + 1.0234, + -1.284, + 0.745, + -0.8799, + -0.7157, + -1.0794, + 0.4314, + -0.0992 + ], + [ + 1.402, + -2.4083, + 0.4545, + 1.202, + 0.466, + 0.8986, + 0.3873, + -1.1954, + -0.7295, + -0.9044, + -0.5264, + -1.508, + -0.6502, + -0.0292, + -0.4135, + 0.4951, + 0.676, + 1.1259, + -0.7328, + -0.577 + ], + [ + 1.2228, + 0.6662, + 0.9003, + 0.9653, + -2.6854, + -0.1184, + 1.5777, + 0.355, + -0.1113, + 1.3689, + -0.6008, + 1.0529, + -1.0451, + -0.1183, + 0.2116, + 1.4146, + -0.124, + 0.7687, + 1.1244, + 0.1898 + ], + [ + -1.2415, + 1.71, + 1.6849, + -1.0357, + 1.5733, + 0.6499, + -1.5619, + -0.3177, + 0.0207, + 1.6492, + -0.4138, + 0.6249, + 0.8653, + 0.2456, + -0.2721, + 2.1352, + -0.557, + -0.8432, + 0.1793, + -0.1528 + ], + [ + 1.3637, + 0.0283, + 1.4619, + 2.6142, + -0.7994, + 0.4259, + 0.1526, + -0.8818, + 0.8592, + 0.5728, + 0.6268, + -0.9715, + -1.365, + -0.3315, + -0.3994, + 0.9386, + 0.6107, + -2.4714, + -0.2895, + -0.7416 + ], + [ + -0.4558, + -1.1376, + -0.4454, + 0.9436, + -0.1809, + -1.2921, + -1.0421, + -0.9698, + 0.6395, + 1.0005, + -0.1009, + 0.583, + -0.5418, + -0.1262, + -1.1714, + 1.228, + 1.0276, + 0.0435, + 1.3971, + 0.0524 + ], + [ + 1.0838, + -0.4271, + -0.1709, + 0.0122, + -0.8556, + 0.3748, + -0.9003, + 0.4325, + -0.2517, + -0.1606, + -0.5199, + 1.4721, + 0.8963, + 0.2706, + -1.0319, + -1.8448, + 0.6037, + -0.1553, + 
-1.7228, + -0.3219 + ], + [ + -0.0606, + -1.8436, + 0.9568, + 0.3851, + -0.1354, + 0.0804, + -0.214, + 0.3143, + -0.7776, + -0.3072, + 0.4482, + -1.4658, + -1.3941, + -2.2366, + -0.1666, + 1.2007, + 1.1896, + -0.6913, + 0.8077, + -0.0427 + ], + [ + 1.9569, + 1.9565, + 0.2668, + -0.758, + -1.4376, + -1.4579, + 1.22, + 2.0497, + -0.8531, + -0.2868, + 1.879, + 0.3755, + -0.603, + 0.9532, + -0.9112, + 0.4927, + 1.787, + -0.6917, + 1.2992, + -1.2786 + ], + [ + 0.2088, + -1.6042, + -1.4851, + 0.3304, + -0.758, + 0.4025, + 0.0821, + -0.0439, + 0.0077, + -0.2116, + -0.9716, + 0.3638, + -0.9135, + 0.2522, + 0.7707, + 0.8041, + 0.2783, + -0.264, + -0.7103, + -0.7538 + ], + [ + -0.0609, + -0.4011, + -2.6306, + 0.9948, + 0.0578, + 0.5269, + -0.7246, + -1.8984, + 1.4391, + 0.4589, + -2.1943, + 0.0407, + -0.9644, + 0.6999, + -0.2134, + -1.4317, + 0.1553, + 1.1814, + 0.529, + -0.1315 + ], + [ + 0.0479, + -0.6824, + -0.9821, + 0.8099, + -0.0845, + -1.4304, + 0.2215, + 1.2053, + 0.5841, + -0.5418, + -1.1139, + -1.0214, + -0.6586, + -0.9873, + -0.627, + -0.8161, + 0.4356, + -0.7296, + 1.2816, + 0.1747 + ], + [ + 1.5953, + 0.1412, + -1.3928, + -0.3266, + 0.3538, + 2.2636, + 0.3221, + -0.5808, + -1.2627, + -1.418, + -0.8741, + -0.2719, + 1.1585, + -0.7015, + -0.9239, + 1.8709, + -0.3701, + -0.9271, + -0.4791, + -1.0854 + ], + [ + 1.55, + 0.6292, + -0.4949, + 1.35, + 0.0748, + 0.4706, + 0.2618, + -0.5674, + 0.1062, + -0.8955, + 0.9664, + -1.2191, + 0.1806, + -2.2524, + 0.2409, + -0.3491, + 2.2583, + -0.6011, + -0.54, + -0.1752 + ], + [ + 0.74, + 0.4227, + 0.2826, + 0.0253, + -0.9664, + 1.1868, + 1.2465, + -0.1074, + 0.3013, + -1.4853, + 0.4403, + 0.1162, + 0.1427, + -1.7351, + -0.5726, + -2.2293, + 0.2641, + 0.633, + 1.0758, + 2.5179 + ], + [ + -0.1529, + -0.3999, + 0.2292, + -0.1002, + -1.9142, + 1.2822, + 0.9362, + 0.6656, + 0.7305, + -1.304, + -0.9037, + 0.8639, + -0.4384, + -1.5435, + -0.838, + 0.6024, + -1.9425, + 0.8417, + 1.4527, + -1.0574 + ], + [ + -0.4808, + -0.1128, + 
-1.5772, + 1.6347, + -0.4475, + -1.336, + -0.8358, + -0.667, + -0.3813, + -0.0675, + 0.1437, + -0.0597, + 0.9919, + 1.9289, + -0.3874, + 0.5108, + -0.387, + 2.0789, + -1.4989, + -0.7148 + ], + [ + -0.4468, + -0.3782, + 0.2239, + 0.1208, + -0.66, + 0.3209, + 0.2253, + -1.2614, + -0.4193, + 1.4624, + 0.5541, + -0.1232, + 0.5891, + -0.7703, + -0.6324, + 0.7311, + -0.2307, + 0.6328, + 0.3758, + 1.5065 + ], + [ + 0.2205, + 1.46, + 1.4749, + -1.0308, + -0.8201, + -1.0855, + -1.9632, + 0.6111, + -0.0857, + 2.0307, + 0.261, + -0.1343, + 0.1226, + -0.8113, + -0.0981, + 0.5672, + 0.1665, + 0.5348, + -2.84, + 1.3184 + ], + [ + -0.1868, + -0.8558, + -0.5043, + 1.3187, + 0.5687, + -0.5904, + 1.6735, + -0.9743, + -0.306, + -0.63, + -0.039, + 0.329, + -0.8927, + 0.6315, + -1.013, + -0.8468, + 2.2813, + -0.3968, + -0.1701, + 0.3624 + ], + [ + -0.4627, + 0.7633, + 1.0987, + -0.7525, + -1.5633, + -0.076, + 0.1037, + 0.6978, + -0.1722, + -0.0461, + 0.3875, + 1.0909, + 0.2668, + -2.4256, + 0.293, + 0.6107, + -0.6653, + -0.4939, + -0.5135, + 0.4648 + ], + [ + 1.1293, + -0.4776, + -1.1841, + 1.945, + 0.3976, + 1.0899, + -1.8488, + -0.3147, + 0.368, + -0.6088, + -1.4179, + -0.0511, + -0.8609, + 1.827, + -1.0346, + 0.894, + -2.0132, + -0.3736, + -0.8182, + -0.9869 + ], + [ + 0.6505, + 1.5063, + 0.5592, + 0.6466, + -0.2412, + 1.2139, + 1.4475, + 0.727, + 0.6906, + -0.3043, + -1.9162, + -1.2834, + 1.9527, + -0.4613, + 1.2887, + -0.8036, + -0.0644, + 0.1568, + 0.1467, + 0.1209 + ], + [ + 0.0219, + 1.153, + 0.5898, + 0.5228, + -0.1517, + 2.1095, + 1.1348, + 0.3478, + 0.4166, + 1.4646, + -0.8298, + -0.8746, + -0.7066, + 1.1111, + -0.6206, + -1.446, + 0.838, + -1.1856, + -0.6124, + -0.5703 + ], + [ + 1.4307, + -0.4624, + -0.7633, + 1.2186, + -0.145, + -0.5863, + 0.2839, + -0.5801, + 0.4696, + 0.7164, + -0.0279, + 0.2549, + 1.2149, + 1.4674, + 0.3822, + 0.0469, + 0.9921, + -0.092, + 1.2116, + 0.1597 + ], + [ + -0.4714, + 2.2288, + 1.4301, + -0.423, + -0.7054, + 1.8221, + -0.7599, + -0.5916, + 
0.4065, + -0.8481, + -0.3276, + -1.6445, + -0.5538, + -0.3662, + 0.0102, + -0.9233, + 0.809, + 0.4401, + -0.3233, + 0.9434 + ], + [ + -0.1985, + -0.3812, + 1.284, + 0.1607, + -0.1346, + -0.6249, + -1.7091, + 0.6016, + 2.1655, + -1.2371, + -1.0872, + 1.0387, + 0.9008, + -1.9731, + -0.0579, + -0.1982, + -0.4899, + 1.0908, + -1.5214, + 0.5036 + ], + [ + 0.7546, + -0.2705, + 1.25, + -1.345, + 1.8198, + -0.2936, + 0.2075, + -1.3354, + -1.8317, + 0.9607, + -0.3799, + -1.6127, + -0.0916, + -0.6888, + 0.7975, + -0.0281, + 0.3594, + 0.5942, + 0.4086, + 0.1016 + ], + [ + 1.77, + -0.4767, + -1.1595, + 0.1532, + -0.0326, + 0.1836, + -0.3286, + 1.4888, + -2.8899, + 1.0536, + 1.9232, + -0.6968, + -0.6339, + -0.0347, + 0.5239, + -0.7647, + -0.194, + 0.7219, + 0.1894, + -1.7706 + ], + [ + -0.4127, + 0.7101, + 1.4662, + -2.0593, + -0.9915, + -0.3649, + 0.4977, + 0.6697, + -0.1104, + 0.2658, + -1.132, + 1.4055, + 0.6701, + -0.9953, + -1.319, + 0.419, + -0.2617, + -0.6068, + 0.385, + 0.8164 + ], + [ + -0.3437, + 1.0851, + -0.2465, + -0.5611, + 0.7702, + 0.0436, + -1.2053, + -0.9226, + -1.7781, + 1.1869, + -1.7516, + -2.4417, + -0.6537, + 0.1807, + 0.9348, + -0.5243, + -0.7177, + -1.9971, + 1.2994, + -0.5201 + ], + [ + 0.1902, + -0.5785, + -0.3695, + -1.3706, + 0.3818, + 1.5547, + -1.0842, + 0.2153, + 0.4955, + 0.8092, + -1.1719, + -0.2028, + 1.1535, + 0.3906, + 1.0151, + -0.4632, + -0.3776, + 0.047, + 1.4837, + 0.6819 + ], + [ + 0.7243, + 0.4333, + 0.6734, + -0.7128, + -1.2333, + 0.8693, + 1.3302, + -0.4147, + -1.0366, + -0.4449, + -1.4051, + 0.1941, + 0.2919, + -1.7384, + -0.3388, + 0.5371, + 0.2249, + -0.4457, + 0.456, + 0.7743 + ], + [ + 0.9661, + 0.0273, + 0.3233, + -1.7933, + 0.238, + -1.152, + 0.6648, + 0.73, + 1.2548, + 1.6302, + 0.8051, + -1.1529, + -0.2192, + 0.2759, + -0.4039, + -0.0211, + -1.4843, + -1.1061, + 0.9614, + 0.1779 + ], + [ + 0.6516, + 0.2064, + -0.127, + 0.9358, + 0.0927, + 0.2763, + -1.8073, + -0.7372, + 0.4339, + 0.544, + 2.2831, + 1.5049, + 1.6803, + 
0.6802, + -1.216, + 0.5923, + 0.8007, + -0.8011, + -0.35, + 2.2807 + ], + [ + 0.4354, + -0.299, + -0.2509, + -0.5827, + -0.5399, + -0.5583, + 0.5145, + -1.0531, + 0.2237, + -0.0867, + 1.4819, + 0.6379, + -1.1996, + -1.2472, + 1.4059, + 0.7136, + 1.0626, + 0.1175, + -0.7227, + -0.2855 + ], + [ + 2.326, + 1.0542, + -0.4181, + -0.1197, + -1.7518, + 0.875, + 0.7441, + -0.6108, + 0.525, + 0.8426, + 0.0483, + 0.6573, + 0.0141, + -0.3728, + 0.9688, + 1.5146, + -0.9698, + 1.3348, + 0.2119, + -0.2309 + ], + [ + -1.31, + -0.3123, + -1.2402, + 0.1168, + 0.7187, + -0.0896, + -0.3067, + 1.8945, + 0.0741, + 0.174, + -0.2408, + -0.8742, + -2.0949, + -1.311, + 1.1656, + 1.2042, + 0.331, + -1.7272, + 0.7415, + 1.1023 + ], + [ + -1.1237, + 0.9391, + 0.7248, + 0.2172, + -0.5511, + 0.6542, + -0.5232, + 0.5059, + -0.2229, + -0.1082, + -1.7101, + 1.3909, + -0.5176, + -0.9714, + -0.4962, + 1.664, + 0.5924, + 0.0542, + -0.0639, + 0.5606 + ], + [ + 0.7717, + 0.4342, + 0.2559, + -0.5208, + 1.5665, + -0.9007, + 0.5495, + 0.8782, + -1.2528, + -1.0134, + -1.1599, + -0.4838, + -0.3956, + 1.5229, + 0.1815, + -0.6772, + -0.1742, + -1.2118, + -0.0586, + -2.3458 + ], + [ + 0.4184, + 0.0507, + -0.4365, + 0.0106, + 0.3925, + 0.0525, + 0.0699, + 0.0136, + -0.5028, + 1.4603, + -0.8616, + 0.0528, + 0.7107, + -1.6958, + 0.6961, + 0.749, + -0.6163, + 0.0456, + 1.3224, + -1.5463 + ], + [ + 1.2705, + 1.5194, + -1.0174, + 0.1728, + 0.5462, + -0.9114, + 0.5157, + 0.2767, + 0.3982, + -0.2879, + 0.4183, + -0.5905, + -0.0314, + 0.5555, + 0.8349, + 0.2477, + 0.1298, + 0.5984, + -0.072, + 1.186 + ], + [ + -1.4858, + -1.3165, + 1.6357, + 1.407, + -0.9654, + -0.6533, + 0.438, + -0.7006, + -1.4518, + -0.2173, + -0.8564, + 0.0853, + 1.2497, + 0.0592, + 0.3054, + -0.2407, + -1.4306, + -1.4354, + -0.1878, + -0.9729 + ], + [ + 1.4146, + -0.9281, + -0.3996, + 0.2136, + -1.2962, + 0.5947, + 0.0234, + 0.5913, + 0.0498, + -1.2492, + 1.3622, + 0.0303, + -0.1406, + 1.0544, + -0.4121, + -0.2487, + 0.1885, + -0.1841, + 1.7337, + 
0.1356 + ], + [ + 1.3118, + 1.9343, + 1.5765, + -1.1271, + 0.9296, + -1.5173, + 0.9496, + -0.1351, + -0.4726, + 1.6528, + -0.3443, + 0.1774, + -1.6216, + 2.3301, + -0.4317, + -0.2967, + 0.3336, + 1.5614, + 0.5239, + 0.0456 + ], + [ + 0.6397, + 1.0026, + -0.4708, + -0.5532, + -0.7832, + 1.1904, + -2.135, + -0.594, + 1.9856, + -1.4386, + 0.1647, + -1.9993, + -0.9835, + -0.6135, + -0.0065, + 0.4442, + -1.6695, + -1.1663, + 1.6551, + -0.2211 + ], + [ + 1.1373, + 0.7798, + -0.1248, + 0.7261, + -0.8628, + 0.4514, + 0.1191, + -1.1251, + -0.7027, + 0.2192, + -1.0791, + 0.1474, + 0.04, + -1.2375, + 2.0006, + -0.0523, + 0.189, + -0.0867, + -0.7848, + 0.6834 + ], + [ + 0.2231, + -0.7509, + 0.5496, + 0.4058, + 0.0411, + 0.2364, + -0.7832, + -3.6367, + -1.2292, + 2.285, + -1.669, + 2.3416, + -0.847, + 2.099, + -1.54, + -0.1755, + -1.1669, + -1.3129, + -1.4352, + 1.0025 + ], + [ + -1.6897, + 0.0125, + 0.8603, + -0.5641, + -0.1021, + 0.9465, + 1.1911, + 0.0972, + 0.2012, + 0.3283, + -0.5206, + 1.6208, + 1.1497, + 1.2348, + 2.2502, + 0.9546, + -0.5284, + -1.4671, + 0.9038, + -0.5993 + ], + [ + -0.732, + 0.1327, + 1.6104, + -0.0646, + 1.43, + -2.1812, + -0.0841, + -0.3023, + -1.7387, + -1.6511, + -0.5601, + 1.356, + 2.7561, + -1.1899, + -0.8138, + -1.1107, + 0.1544, + -0.7469, + 1.8088, + -0.0592 + ], + [ + -0.5646, + -0.3306, + -0.064, + -0.6655, + 0.4984, + 0.1532, + -0.7117, + -1.1264, + -0.5781, + 1.0013, + 1.146, + -1.3487, + 0.5337, + 0.3819, + 0.0489, + -0.2994, + -1.0327, + -0.7898, + 1.0024, + 0.4805 + ], + [ + 0.9905, + -0.7819, + -0.0207, + -0.607, + 1.4292, + -1.3072, + 0.6166, + -0.799, + -0.8048, + 0.2746, + 0.3502, + -1.0477, + 0.3548, + -0.8755, + 1.548, + -0.9908, + -1.3235, + 0.2564, + -0.0763, + -0.3935 + ], + [ + -0.5773, + -0.8525, + 0.3657, + -1.0686, + 1.096, + -2.788, + 0.8837, + -1.3865, + 0.4778, + 0.506, + -1.0256, + -0.7631, + 0.0299, + 1.1358, + 0.3911, + -0.5343, + -1.1064, + -1.4591, + 0.0172, + 0.6138 + ], + [ + -1.6318, + -0.0936, + -0.7996, + 
1.1002, + 0.6576, + 1.3225, + 0.0588, + 0.0047, + -0.0142, + 0.4834, + 0.4746, + -0.1941, + -0.5444, + -0.9089, + 0.7556, + 0.2221, + -0.9489, + -0.08, + 1.6407, + 0.6503 + ], + [ + -0.5933, + -0.4653, + 0.9605, + 1.5295, + 1.8145, + -0.3425, + -1.1254, + -0.1492, + -0.8059, + -0.5591, + 1.0311, + -0.7333, + -2.7195, + 0.2144, + 0.3544, + 0.3933, + -1.2386, + -0.5931, + -1.4562, + 0.9007 + ], + [ + 1.5055, + -0.6687, + 1.2637, + -0.4069, + -0.9543, + 2.2725, + -1.3887, + 0.6192, + 0.2349, + -0.2967, + 0.9429, + 1.0616, + 0.6485, + 0.1225, + -0.6763, + -0.3906, + 0.0541, + -3.4325, + -3.1944, + 0.1077 + ], + [ + -0.4841, + -0.6157, + -0.0292, + 0.8495, + -0.7487, + -1.1898, + -0.6719, + -0.9199, + 0.0557, + -1.182, + -1.0903, + 0.827, + -0.1005, + -1.6563, + 0.3565, + 0.2699, + -1.1526, + 0.4731, + 0.6436, + 1.7463 + ] + ] +} diff --git a/turing/01-predictive_checks.jl b/turing/01-predictive_checks.jl index 4bd137d..16b7768 100644 --- a/turing/01-predictive_checks.jl +++ b/turing/01-predictive_checks.jl @@ -1,4 +1,5 @@ using Turing +using DataFrames using Random: seed! seed!(123) @@ -16,7 +17,7 @@ y = rand(Bernoulli(real_p), 1_000) for i in eachindex(y) y[i] ~ Bernoulli(p) end - return(; p, y) + return (; p, y) end # Instantiate the model @@ -27,6 +28,7 @@ model = coin_flip(y) # i.e. don't condition on data. # Just use the Prior() sampler: prior_chain = sample(model, Prior(), 2_000) +println(DataFrame(summarystats(prior_chain))) # Step 2: Instantiate a Vector of missing values # with the same dimensions as y: @@ -45,6 +47,7 @@ prior_check = predict(model_missing, prior_chain) # i.e. do condition on data. 
# Just use any sampler except the Prior(): posterior_chain = sample(model, MH(), 2_000) +println(DataFrame(summarystats(posterior_chain))) # Step 2: Instantiate a Vector of missing values # with the same dimensions as y: diff --git a/turing/02-linear_regression-kidiq.jl b/turing/02-linear_regression-kidiq.jl index a2a0fc7..fbcdc1a 100644 --- a/turing/02-linear_regression-kidiq.jl +++ b/turing/02-linear_regression-kidiq.jl @@ -37,6 +37,7 @@ model = linear_regression(X, y) # sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) # results: # parameters mean std naive_se mcse ess rhat ess_per_sec @@ -63,6 +64,7 @@ chn_qr = sample(model_qr, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) betas = mapslices(x -> R_ast^-1 * x, chn_qr[:, namesingroup(chn_qr, :β), :].value.data, dims=[2]) chain_beta = setrange(Chains(betas, ["real_β[$i]" for i in 1:size(Q_ast, 2)]), 1_001:1:2_000) chn_qr_reconstructed = hcat(chain_beta, chn_qr) +println(DataFrame(summarystats(chn_qr_reconstructed))) # results: # parameters mean std naive_se mcse ess rhat diff --git a/turing/03-logistic_regression-wells.jl b/turing/03-logistic_regression-wells.jl index d3419f4..b0ddbfd 100644 --- a/turing/03-logistic_regression-wells.jl +++ b/turing/03-logistic_regression-wells.jl @@ -37,6 +37,7 @@ model = logistic_regression(X, y) # sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) # results: # parameters mean std naive_se mcse ess rhat ess_per_sec diff --git a/turing/04-ordinal_regression-esoph.jl b/turing/04-ordinal_regression-esoph.jl index 179abcf..7fa9c32 100644 --- a/turing/04-ordinal_regression-esoph.jl +++ b/turing/04-ordinal_regression-esoph.jl @@ -50,6 +50,7 @@ model = ordered_regression(X, y) # sample with NUTS, 4 multi-threaded parallel chains, 
and 2k iters with 1k warmup chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) # results: # parameters mean std naive_se mcse ess rhat ess_per_sec diff --git a/turing/05-poisson_regression-roaches.jl b/turing/05-poisson_regression-roaches.jl index 92f8afd..dafa1fc 100644 --- a/turing/05-poisson_regression-roaches.jl +++ b/turing/05-poisson_regression-roaches.jl @@ -36,6 +36,7 @@ model = poisson_regression(X, y) # sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) # results: # parameters mean std naive_se mcse ess rhat ess_per_sec diff --git a/turing/06-robust_linear_regression-duncan.jl b/turing/06-robust_linear_regression-duncan.jl index 6c22cc4..e4dc8e5 100644 --- a/turing/06-robust_linear_regression-duncan.jl +++ b/turing/06-robust_linear_regression-duncan.jl @@ -38,6 +38,7 @@ model = student_t_regression(X, y) # sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) # results: # parameters mean std naive_se mcse ess rhat ess_per_sec diff --git a/turing/07-robust_beta_binomial_regression-wells.jl b/turing/07-robust_beta_binomial_regression-wells.jl index 16e3279..ae55967 100644 --- a/turing/07-robust_beta_binomial_regression-wells.jl +++ b/turing/07-robust_beta_binomial_regression-wells.jl @@ -52,6 +52,7 @@ model = beta_binomial_regression(X, y) # sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) # results: # parameters mean std naive_se mcse ess rhat ess_per_sec diff --git a/turing/08-robust_robit_regression-wells.jl b/turing/08-robust_robit_regression-wells.jl index 35210aa..3b5bfa2 100644 --- 
a/turing/08-robust_robit_regression-wells.jl +++ b/turing/08-robust_robit_regression-wells.jl @@ -39,3 +39,4 @@ model = robit_regression(X, y) # sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) diff --git a/turing/09-robust_negative_binomial_regression-roaches.jl b/turing/09-robust_negative_binomial_regression-roaches.jl index 8683cf6..91a06f9 100644 --- a/turing/09-robust_negative_binomial_regression-roaches.jl +++ b/turing/09-robust_negative_binomial_regression-roaches.jl @@ -46,6 +46,7 @@ model = negative_binomial_regression(X, y) # sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) # results: # parameters mean std naive_se mcse ess rhat ess_per_sec diff --git a/turing/10-robust_zero_inflated_regression-negative_binomial-roaches.jl b/turing/10-robust_zero_inflated_regression-negative_binomial-roaches.jl index 850fdb4..9c509a0 100644 --- a/turing/10-robust_zero_inflated_regression-negative_binomial-roaches.jl +++ b/turing/10-robust_zero_inflated_regression-negative_binomial-roaches.jl @@ -55,6 +55,7 @@ model = zero_inflated_negative_binomial_regression(X, y) # sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) # results: # parameters mean std naive_se mcse ess rhat ess_per_sec diff --git a/turing/10-robust_zero_inflated_regression-poisson-roaches.jl b/turing/10-robust_zero_inflated_regression-poisson-roaches.jl index e4e33fe..63db3c8 100644 --- a/turing/10-robust_zero_inflated_regression-poisson-roaches.jl +++ b/turing/10-robust_zero_inflated_regression-poisson-roaches.jl @@ -45,6 +45,7 @@ model = zero_inflated_poisson_regression(X, y) # sample with NUTS, 4 multi-threaded 
parallel chains, and 2k iters with 1k warmup chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) # results: # parameters mean std naive_se mcse ess rhat ess_per_sec diff --git a/turing/11-sparse_horseshoe.jl b/turing/11-sparse_horseshoe.jl new file mode 100644 index 0000000..8aad307 --- /dev/null +++ b/turing/11-sparse_horseshoe.jl @@ -0,0 +1,91 @@ +using Turing +using CSV +using DataFrames +using StatsBase +using LinearAlgebra + +# reproducibility +using Random: seed! + +seed!(123) + +# load data +df = CSV.read("datasets/sparse_regression.csv", DataFrame) + +# define data matrix X and standardize +X = select(df, Not(:y)) |> Matrix |> float +X = standardize(ZScoreTransform, X; dims=1) + +# define dependent variable y and standardize +y = df[:, :y] |> float +y = standardize(ZScoreTransform, y; dims=1) + +# define the model +@model function sparse_horseshoe_regression(X, y; predictors=size(X, 2)) + # priors + α ~ TDist(3) * 2.5 + λ ~ filldist(truncated(Cauchy(0, 1); lower=0), predictors) + τ ~ truncated(Cauchy(0, 1); lower=0) + σ ~ Exponential(1) + + β ~ MvNormal(Diagonal((λ .* τ) .^ 2)) + + # likelihood + y ~ MvNormal(α .+ X * β, σ^2 * I) + return (; y, α, λ, τ, σ, β) +end + +# instantiate the model +model = sparse_horseshoe_regression(X, y) + +# sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup +chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) + +# results: +# parameters mean std mcse ess_bulk ess_tail rhat ess_per_sec +# Symbol Float64 Float64 Float64 Float64 Float64 Float64 Float64 +# +# α 0.0042 0.0305 0.0040 59.5643 80.2524 1.0706 0.0642 +# λ[1] -2.1122 6.6393 2.0278 13.0743 46.8402 1.5634 0.0141 +# λ[2] -0.1085 1.1720 0.1996 23.9859 146.5381 1.2223 0.0259 +# λ[3] 0.6606 4.3584 1.4231 10.9612 30.3532 1.9050 0.0118 +# λ[4] -0.3683 1.3287 0.3048 16.2733 93.6187 1.3564 0.0175 +# λ[5] -0.0138 4.8649 1.5867 11.0779 30.2512 1.8539 
0.0119 +# λ[6] -0.0985 3.6917 1.2181 10.8100 60.5601 1.9403 0.0117 +# λ[7] -0.3905 1.3529 0.3539 11.8725 58.3778 1.7623 0.0128 +# λ[8] -1.0362 2.6053 0.8181 11.5641 40.9582 1.7344 0.0125 +# λ[9] 0.9852 1.0252 0.1503 37.0615 71.6680 1.1240 0.0400 +# λ[10] 0.0326 1.6489 0.3335 20.7106 113.6655 1.2161 0.0223 +# λ[11] 0.7310 2.7803 0.8534 12.8535 20.8644 1.6288 0.0139 +# λ[12] -0.7746 1.7907 0.5203 12.4086 37.8785 1.6285 0.0134 +# λ[13] 1.1169 1.4141 0.2771 24.5536 33.7786 1.1852 0.0265 +# λ[14] -0.3582 5.4360 1.8088 11.0894 46.2207 1.9098 0.0120 +# λ[15] 0.6097 1.8694 0.4101 17.7370 57.8266 1.2889 0.0191 +# λ[16] -0.6527 2.4383 0.5938 13.4484 52.2172 1.4904 0.0145 +# λ[17] -0.4548 1.3797 0.2970 18.9404 75.0517 1.2441 0.0204 +# λ[18] 0.1632 1.3832 0.3004 20.4743 105.9446 1.2071 0.0221 +# λ[19] 0.2587 1.9653 0.5230 12.2939 39.3894 1.6290 0.0133 +# λ[20] 1.0108 5.1065 1.7174 11.1679 47.7117 1.8312 0.0120 +# τ 0.0733 0.0225 0.0041 29.0739 134.8200 1.1333 0.0314 +# σ 0.3250 0.0225 0.0022 116.6290 207.2708 1.0186 0.1258 +# β[1] 0.4853 0.0345 0.0052 45.8155 110.9592 1.0770 0.0494 +# β[2] 0.0030 0.0246 0.0035 49.2652 215.6946 1.0856 0.0531 +# β[3] 0.2613 0.0341 0.0045 58.8703 174.2487 1.0742 0.0635 +# β[4] 0.0082 0.0286 0.0041 45.9931 99.5215 1.0843 0.0496 +# β[5] -0.2896 0.0315 0.0030 109.3217 165.7488 1.0426 0.1179 +# β[6] 0.2234 0.0314 0.0035 79.2594 103.0314 1.0828 0.0855 +# β[7] 0.0172 0.0292 0.0044 45.4594 166.8813 1.1061 0.0490 +# β[8] 0.1240 0.0342 0.0048 52.9156 193.5309 1.1070 0.0571 +# β[9] 0.0031 0.0338 0.0062 31.2744 24.4042 1.1500 0.0337 +# β[10] 0.0260 0.0338 0.0047 52.5279 115.6564 1.0818 0.0566 +# β[11] 0.0813 0.0363 0.0042 68.8451 143.6499 1.0765 0.0742 +# β[12] -0.0063 0.0305 0.0040 58.7870 120.4656 1.0386 0.0634 +# β[13] -0.0202 0.0238 0.0025 93.3616 207.3711 1.0294 0.1007 +# β[14] -0.4260 0.0333 0.0049 47.0491 66.6016 1.1070 0.0507 +# β[15] -0.0153 0.0281 0.0039 47.5601 161.6239 1.1152 0.0513 +# β[16] -0.0198 0.0306 0.0071 20.3811 126.5139 1.2857 0.0220 
+# β[17] -0.0078 0.0253 0.0031 65.9642 111.4478 1.0430 0.0711 +# β[18] 0.0113 0.0250 0.0033 58.6306 55.4533 1.0512 0.0632 +# β[19] -0.0046 0.0287 0.0034 73.1314 60.9919 1.0320 0.0789 +# β[20] -0.3487 0.0348 0.0057 37.4172 51.0361 1.1133 0.0404 diff --git a/turing/12-sparse_horseshoe_p.jl b/turing/12-sparse_horseshoe_p.jl new file mode 100644 index 0000000..75126c5 --- /dev/null +++ b/turing/12-sparse_horseshoe_p.jl @@ -0,0 +1,93 @@ +using Turing +using CSV +using DataFrames +using StatsBase +using LinearAlgebra + +# reproducibility +using Random: seed! + +seed!(123) + +# load data +df = CSV.read("datasets/sparse_regression.csv", DataFrame) + +# define data matrix X and standardize +X = select(df, Not(:y)) |> Matrix |> float +X = standardize(ZScoreTransform, X; dims=1) + +# define dependent variable y and standardize +y = df[:, :y] |> float +y = standardize(ZScoreTransform, y; dims=1) + +# define the model +@model function sparse_horseshoe_p_regression(X, y; predictors=size(X, 2)) + # priors + α ~ TDist(3) * 2.5 + λ ~ filldist(truncated(Cauchy(0, 1); lower=0), predictors) + τ ~ truncated(Cauchy(0, 1 / predictors); lower=0) + η ~ truncated(Cauchy(0, 1); lower=0) + σ ~ Exponential(1) + + β ~ MvNormal(Diagonal(((η * τ) .* λ) .^ 2)) + + # likelihood + y ~ MvNormal(α .+ X * β, σ^2 * I) + return (; y, α, λ, τ, η, σ, β) +end + +# instantiate the model +model = sparse_horseshoe_p_regression(X, y) + +# sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup +chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) + +# results: +# parameters mean std mcse ess_bulk ess_tail rhat ess_per_sec +# Symbol Float64 Float64 Float64 Float64 Float64 Float64 Float64 +# +# α -0.0002 0.0323 0.0030 118.1721 278.9664 1.0243 0.1621 +# λ[1] 6.0579 10.0663 3.0204 11.4497 16.7640 1.6951 0.0157 +# λ[2] -0.0932 1.4153 0.3142 19.2305 113.7813 1.2594 0.0264 +# λ[3] -2.1663 4.1320 1.2938 13.3036 38.5615 1.6035 0.0182 +# λ[4] 
-0.0910 2.0557 0.4913 18.0305 33.4431 1.3416 0.0247 +# λ[5] -2.1000 5.6797 1.7336 12.2582 22.8753 1.7188 0.0168 +# λ[6] 4.1577 2.6026 0.3537 70.9042 71.3911 1.0716 0.0972 +# λ[7] -0.0265 1.7567 0.4542 14.3379 53.9656 1.4495 0.0197 +# λ[8] -1.2036 2.7373 0.8406 13.6670 66.2890 1.5591 0.0187 +# λ[9] -0.4556 1.5960 0.3653 16.0928 83.1542 1.3777 0.0221 +# λ[10] -0.0798 1.7582 0.4172 14.8505 94.2973 1.4384 0.0204 +# λ[11] 1.3065 2.5436 0.7263 11.9299 25.9705 1.6909 0.0164 +# λ[12] -0.1817 1.9640 0.3500 24.6331 171.4874 1.1679 0.0338 +# λ[13] -0.1405 1.9775 0.4119 20.3797 72.7982 1.2415 0.0280 +# λ[14] -3.5763 5.9160 1.8301 12.9173 70.7797 1.6019 0.0177 +# λ[15] -0.9324 1.6119 0.4098 14.5331 44.1432 1.4755 0.0199 +# λ[16] 0.5214 1.9429 0.4558 16.4813 27.3755 1.3681 0.0226 +# λ[17] 0.6175 1.4098 0.2924 23.2504 39.4808 1.2017 0.0319 +# λ[18] 0.2180 1.6932 0.4328 13.5904 58.7350 1.5340 0.0186 +# λ[19] -0.1591 1.4039 0.3444 16.1316 33.6346 1.3759 0.0221 +# λ[20] -2.6696 4.3510 1.3857 13.3690 55.2658 1.6173 0.0183 +# τ 0.0629 0.0533 0.0058 54.2184 109.9163 1.1076 0.0744 +# η 1.5893 1.1604 0.1441 46.6923 142.6147 1.1152 0.0640 +# σ 0.3277 0.0267 0.0032 81.0144 157.5050 1.0326 0.1111 +# β[1] 0.4926 0.0360 0.0044 69.2672 97.7435 1.0376 0.0950 +# β[2] 0.0073 0.0258 0.0047 30.1671 167.0479 1.1761 0.0414 +# β[3] 0.2565 0.0379 0.0041 89.2120 239.9928 1.0207 0.1224 +# β[4] 0.0147 0.0305 0.0037 78.0556 139.8530 1.0337 0.1071 +# β[5] -0.2802 0.0328 0.0027 147.2640 257.6856 1.0237 0.2020 +# β[6] 0.2099 0.0343 0.0036 90.6120 157.6924 1.0392 0.1243 +# β[7] 0.0219 0.0276 0.0025 127.7747 191.9369 1.0125 0.1752 +# β[8] 0.1323 0.0327 0.0026 153.7350 364.8132 1.0359 0.2109 +# β[9] -0.0008 0.0276 0.0027 108.0584 166.8649 1.0372 0.1482 +# β[10] 0.0214 0.0295 0.0030 102.5774 145.1183 1.0140 0.1407 +# β[11] 0.0643 0.0369 0.0049 56.9598 238.9494 1.0615 0.0781 +# β[12] -0.0130 0.0267 0.0023 137.9953 260.4812 1.0264 0.1893 +# β[13] -0.0118 0.0240 0.0021 125.8721 236.2345 1.0301 0.1726 +# β[14] 
-0.4283 0.0365 0.0040 87.5668 59.6759 1.0415 0.1201 +# β[15] -0.0220 0.0296 0.0029 104.5022 314.5114 1.0348 0.1433 +# β[16] -0.0033 0.0260 0.0025 104.9021 171.2699 1.0245 0.1439 +# β[17] -0.0020 0.0275 0.0030 83.8294 175.2058 1.0545 0.1150 +# β[18] 0.0070 0.0268 0.0027 106.2810 142.6289 1.0446 0.1458 +# β[19] 0.0015 0.0284 0.0035 64.7765 88.3422 1.0586 0.0888 +# β[20] -0.3416 0.0367 0.0051 52.7042 136.4280 1.0851 0.0723 diff --git a/turing/13-sparse_finnish_horseshoe.jl b/turing/13-sparse_finnish_horseshoe.jl new file mode 100644 index 0000000..a13cd84 --- /dev/null +++ b/turing/13-sparse_finnish_horseshoe.jl @@ -0,0 +1,105 @@ +using Turing +using CSV +using DataFrames +using StatsBase +using LinearAlgebra + +# reproducibility +using Random: seed! + +seed!(123) + +# load data +df = CSV.read("datasets/sparse_regression.csv", DataFrame) + +# define data matrix X and standardize +X = select(df, Not(:y)) |> Matrix |> float +X = standardize(ZScoreTransform, X; dims=1) + +# define dependent variable y and standardize +y = df[:, :y] |> float +y = standardize(ZScoreTransform, y; dims=1) + +# define the model +@model function sparse_finnish_horseshoe_regression( + X, + y; + predictors=size(X, 2), + τ₀=3, + ν_local=1, + ν_global=1, + slab_df=4, + slab_scale=2, +) + # priors + α ~ TDist(3) * 2.5 + z ~ MvNormal(I(predictors)) # standard normal for coefs + σ ~ Exponential(1) + λ ~ filldist(truncated(TDist(ν_local) * 2.5; lower=0), predictors) # local shrinkage + τ ~ (τ₀ * σ) * truncated(TDist(ν_global); lower=0) # global shrinkage + c_aux ~ InverseGamma(0.5 * slab_df, 0.5 * slab_df) + + c = slab_scale * sqrt(c_aux) + λtilde = λ ./ hypot.(1, (τ / c) .* λ) + β = τ .* z .* λtilde # coefficients + + # likelihood + y ~ MvNormal(α .+ X * β, σ^2 * I) + return (; y, α, λ, τ, λtilde, z, c, c_aux, σ, β) +end + +# instantiate the model +model = sparse_finnish_horseshoe_regression(X, y) + +# sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup +chn = 
sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) + +# results: +# parameters mean std mcse ess_bulk ess_tail rhat ess_per_sec +# Symbol Float64 Float64 Float64 Float64 Float64 Float64 Float64 +# +# α -0.0000 0.0336 0.0004 8823.6551 2625.2448 1.0010 63.6954 +# z[1] 1.1743 0.6211 0.0109 3213.3734 1982.3955 1.0007 23.1964 +# z[2] 0.1741 0.7198 0.0123 3571.7797 2485.5006 1.0011 25.7836 +# z[3] 1.1017 0.5952 0.0096 3728.6517 2509.9228 1.0004 26.9160 +# z[4] 0.2029 0.6915 0.0125 3294.1032 2269.1136 1.0006 23.7792 +# z[5] -1.1319 0.6242 0.0106 3273.5540 2001.8126 1.0007 23.6308 +# z[6] 1.0820 0.5930 0.0099 3335.8259 1514.1053 1.0003 24.0803 +# z[7] 0.3718 0.6854 0.0115 3709.5649 2602.5957 1.0030 26.7783 +# z[8] 1.0127 0.5866 0.0104 3281.1430 2528.0699 1.0009 23.6856 +# z[9] -0.0764 0.7137 0.0114 3946.6966 2386.0961 1.0003 28.4900 +# z[10] 0.3815 0.6643 0.0118 3427.8556 1967.6517 1.0012 24.7447 +# z[11] 0.7501 0.5884 0.0108 3237.0560 2114.0547 1.0011 23.3674 +# z[12] -0.1908 0.7082 0.0112 4037.0612 2465.3878 1.0016 29.1424 +# z[13] -0.1799 0.6934 0.0117 3659.5375 2327.1124 1.0008 26.4171 +# z[14] -1.1706 0.6260 0.0106 2951.8902 2005.4013 1.0019 21.3088 +# z[15] -0.3192 0.6938 0.0117 3701.8526 2415.6241 1.0017 26.7226 +# z[16] -0.1536 0.6923 0.0129 3049.0712 2067.3564 1.0034 22.0103 +# z[17] -0.0948 0.6687 0.0124 2989.3825 2500.1203 1.0015 21.5795 +# z[18] 0.1474 0.6987 0.0113 3903.8315 2364.3293 1.0014 28.1806 +# z[19] -0.0195 0.6944 0.0115 3577.8364 2623.2387 1.0006 25.8273 +# z[20] -1.1287 0.5960 0.0100 3204.0600 2055.6806 1.0022 23.1292 +# σ 0.3270 0.0254 0.0004 4952.0710 2899.5720 0.9998 35.7475 +# λ[1] 63.9949 832.4170 24.9693 2269.0570 1452.5641 0.9999 16.3797 +# λ[2] 2.0061 5.1148 0.0903 2689.4842 2228.4052 1.0002 19.4146 +# λ[3] 18.9900 381.5160 6.1106 2473.8537 2344.7816 1.0013 17.8580 +# λ[4] 1.9434 3.6647 0.0671 2629.0522 2303.8663 1.0003 18.9784 +# λ[5] 18.5162 141.6885 3.1343 2258.5089 1992.2430 1.0011 16.3035 +# 
λ[6] 15.3717 202.8024 3.9001 2514.2793 1621.6142 1.0004 18.1498 +# λ[7] 2.2426 4.9889 0.1117 2206.7881 2225.5464 1.0025 15.9302 +# λ[8] 6.2598 15.3460 0.3315 2004.0581 2379.0276 1.0012 14.4667 +# λ[9] 1.9147 4.4790 0.0726 2720.5423 1990.4467 0.9998 19.6388 +# λ[10] 2.2991 5.4705 0.0881 2931.8924 2340.0812 1.0000 21.1645 +# λ[11] 4.0614 15.8590 0.2706 2451.3843 1738.1194 1.0008 17.6958 +# λ[12] 2.6250 19.0674 0.5702 2802.5203 2222.0887 1.0020 20.2306 +# λ[13] 7.9188 319.1848 5.7201 2453.5111 1751.0486 0.9999 17.7112 +# λ[14] 33.3954 280.7108 7.2404 1981.8888 1342.0934 1.0028 14.3067 +# λ[15] 2.3651 7.2423 0.1300 2877.8021 2190.4599 1.0000 20.7740 +# λ[16] 1.9382 3.8046 0.0748 2461.9774 2181.2419 1.0019 17.7723 +# λ[17] 2.1877 8.0997 0.1390 2862.7741 2464.6434 1.0004 20.6655 +# λ[18] 1.9594 4.6106 0.0787 2840.8212 1831.3241 1.0010 20.5071 +# λ[19] 2.0235 9.6304 0.1527 2409.5807 1930.8216 1.0004 17.3941 +# λ[20] 18.1218 56.2430 1.6780 2247.6124 1714.5189 1.0025 16.2249 +# τ 0.0428 0.0207 0.0005 1851.0045 2496.6184 1.0011 13.3619 +# c_aux 1.8812 4.9001 0.1314 3930.7145 2055.0016 1.0030 28.3747 diff --git a/turing/14-sparse_r2d2.jl b/turing/14-sparse_r2d2.jl new file mode 100644 index 0000000..347ea1f --- /dev/null +++ b/turing/14-sparse_r2d2.jl @@ -0,0 +1,101 @@ +using Turing +using CSV +using DataFrames +using StatsBase +using LinearAlgebra + +# reproducibility +using Random: seed! 
+ +seed!(123) + +# load data +df = CSV.read("datasets/sparse_regression.csv", DataFrame) + +# define data matrix X and standardize +X = select(df, Not(:y)) |> Matrix |> float +X = standardize(ZScoreTransform, X; dims=1) + +# define dependent variable y and standardize +y = df[:, :y] |> float +y = standardize(ZScoreTransform, y; dims=1) + +# define the model +@model function sparse_r2d2_regression( + X, + y; + predictors=size(X, 2), + mean_R²=0.5, + prec_R²=2, + cons_D2=1, +) + # priors + α ~ TDist(3) * 2.5 + z ~ filldist(Normal(), predictors) + σ ~ Exponential(1) + + R² ~ Beta(mean_R² * prec_R², (1 - mean_R²) * prec_R²) + ϕ ~ Dirichlet(predictors, cons_D2) + τ² = σ^2 * R² / (1 - R²) + + β = z .* sqrt.(ϕ * τ²) + + # likelihood + y ~ MvNormal(α .+ X * β, σ^2 * I) + return (; y, α, τ², R², ϕ, σ, β) +end + +# instantiate the model +model = sparse_r2d2_regression(X, y) + +# sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup +chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) + +# results: +# parameters mean std mcse ess_bulk ess_tail rhat ess_per_sec +# Symbol Float64 Float64 Float64 Float64 Float64 Float64 Float64 +# +# α -0.0000 0.0331 0.0005 4810.7736 2897.9874 1.0011 77.5906 +# z[1] 1.9471 0.5164 0.0108 2324.2468 2497.6268 1.0017 37.4866 +# z[2] 0.1245 0.4540 0.0092 2791.2661 2248.3270 1.0010 45.0190 +# z[3] 1.3798 0.4926 0.0111 2172.1046 2140.8804 1.0008 35.0328 +# z[4] 0.1563 0.4762 0.0103 2747.7218 1445.6867 1.0023 44.3167 +# z[5] -1.4380 0.4741 0.0101 2372.1073 2371.4801 1.0027 38.2586 +# z[6] 1.2682 0.4887 0.0115 1992.7956 2278.4217 1.0002 32.1408 +# z[7] 0.3316 0.4499 0.0105 2336.5928 1745.2343 1.0005 37.6858 +# z[8] 0.9663 0.4515 0.0103 2090.2258 2188.1982 1.0004 33.7122 +# z[9] -0.0916 0.4659 0.0103 2388.7155 1758.8378 1.0026 38.5264 +# z[10] 0.3364 0.4306 0.0090 2567.0167 2250.0817 1.0022 41.4022 +# z[11] 0.6615 0.4337 0.0096 2477.2456 1968.5712 1.0011 39.9543 +# z[12] -0.1690 
0.4355 0.0089 2617.9076 1908.0778 1.0015 42.2230 +# z[13] -0.1468 0.4400 0.0096 2287.1862 1564.5962 1.0011 36.8889 +# z[14] -1.7863 0.5031 0.0105 2296.8729 2411.8110 1.0003 37.0451 +# z[15] -0.3287 0.4700 0.0118 1861.3255 1528.6080 1.0009 30.0204 +# z[16] -0.1205 0.4489 0.0102 2273.5535 1642.6176 1.0044 36.6690 +# z[17] -0.1267 0.4418 0.0090 2739.7594 1874.8147 1.0009 44.1882 +# z[18] 0.1688 0.4567 0.0097 2550.0217 1904.7321 1.0022 41.1281 +# z[19] -0.0199 0.4536 0.0101 2480.7957 1820.9155 1.0015 40.0115 +# z[20] -1.6009 0.4987 0.0103 2563.1642 2283.9908 1.0015 41.3400 +# σ 0.3346 0.0267 0.0005 2948.2022 2761.5225 1.0004 47.5501 +# R² 0.8390 0.0540 0.0015 1231.3745 1913.9550 1.0010 19.8602 +# ϕ[1] 0.1204 0.0555 0.0010 2333.1188 2513.7715 1.0018 37.6297 +# ϕ[2] 0.0300 0.0367 0.0006 2518.7483 2135.0483 1.0016 40.6237 +# ϕ[3] 0.0743 0.0474 0.0009 2504.7310 2406.8258 1.0015 40.3976 +# ϕ[4] 0.0300 0.0351 0.0006 1981.0706 1398.5553 1.0015 31.9517 +# ϕ[5] 0.0807 0.0473 0.0008 2783.2044 2307.1676 1.0013 44.8889 +# ϕ[6] 0.0669 0.0445 0.0008 2398.9212 2374.6042 1.0008 38.6910 +# ϕ[7] 0.0313 0.0353 0.0006 2118.6381 1571.9491 1.0015 34.1705 +# ϕ[8] 0.0496 0.0407 0.0008 2275.6622 2287.6915 1.0021 36.7030 +# ϕ[9] 0.0305 0.0374 0.0006 2285.8077 1949.9086 1.0016 36.8667 +# ϕ[10] 0.0314 0.0348 0.0005 2562.6691 1670.6879 1.0008 41.3320 +# ϕ[11] 0.0403 0.0389 0.0006 2432.1167 2037.7906 0.9997 39.2264 +# ϕ[12] 0.0310 0.0349 0.0005 2817.5779 2002.4166 1.0006 45.4433 +# ϕ[13] 0.0302 0.0359 0.0006 2301.1932 1914.0317 1.0008 37.1148 +# ϕ[14] 0.1125 0.0550 0.0010 3046.9685 2865.5752 1.0004 49.1431 +# ϕ[15] 0.0313 0.0344 0.0006 2488.5343 2209.6063 0.9997 40.1364 +# ϕ[16] 0.0297 0.0349 0.0006 2590.9197 2303.3675 1.0015 41.7877 +# ϕ[17] 0.0302 0.0336 0.0006 2726.4569 2548.4165 0.9996 43.9737 +# ϕ[18] 0.0305 0.0365 0.0006 2251.9325 1758.6762 1.0029 36.3203 +# ϕ[19] 0.0288 0.0336 0.0006 2342.6613 2457.5977 1.0016 37.7836 +# ϕ[20] 0.0903 0.0493 0.0008 3297.5508 2547.3947 1.0002 53.1846 diff 
--git a/turing/11-hierarchical_varying_intercept-cheese.jl b/turing/15-hierarchical_varying_intercept-cheese.jl similarity index 98% rename from turing/11-hierarchical_varying_intercept-cheese.jl rename to turing/15-hierarchical_varying_intercept-cheese.jl index c026bf7..01db0ac 100644 --- a/turing/11-hierarchical_varying_intercept-cheese.jl +++ b/turing/15-hierarchical_varying_intercept-cheese.jl @@ -57,6 +57,7 @@ model = varying_intercept_regression(X, idx, y) # sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) # results: # parameters mean std naive_se mcse ess rhat ess_per_sec diff --git a/turing/11-hierarchical_varying_intercept-non_centered-cheese.jl b/turing/15-hierarchical_varying_intercept-non_centered-cheese.jl similarity index 98% rename from turing/11-hierarchical_varying_intercept-non_centered-cheese.jl rename to turing/15-hierarchical_varying_intercept-non_centered-cheese.jl index de88b2f..787ba85 100644 --- a/turing/11-hierarchical_varying_intercept-non_centered-cheese.jl +++ b/turing/15-hierarchical_varying_intercept-non_centered-cheese.jl @@ -58,6 +58,7 @@ model = varying_intercept_ncp_regression(X, idx, y) # sample with NUTS, 4 multi-threaded parallel chains, and 2k iters with 1k warmup chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) # results: # parameters mean std naive_se mcse ess rhat ess_per_sec diff --git a/turing/12-hierarchical_varying_intercept_slope-cheese.jl b/turing/16-hierarchical_varying_intercept_slope-cheese.jl similarity index 99% rename from turing/12-hierarchical_varying_intercept_slope-cheese.jl rename to turing/16-hierarchical_varying_intercept_slope-cheese.jl index c3573db..1d33461 100644 --- a/turing/12-hierarchical_varying_intercept_slope-cheese.jl +++ b/turing/16-hierarchical_varying_intercept_slope-cheese.jl @@ -66,6 +66,7 @@ model = 
correlated_varying_intercept_slope_regression(X, idx, y) # sample with NUTS, 4 multi-threaded parallel chains, and 2k iters chn = sample(model, NUTS(1_000, 0.8), MCMCThreads(), 1_000, 4) +println(DataFrame(summarystats(chn))) # results: # Summary Statistics diff --git a/turing/13-model_comparison-roaches.jl b/turing/17-model_comparison-roaches.jl similarity index 99% rename from turing/13-model_comparison-roaches.jl rename to turing/17-model_comparison-roaches.jl index 579f51d..aea7479 100644 --- a/turing/13-model_comparison-roaches.jl +++ b/turing/17-model_comparison-roaches.jl @@ -74,8 +74,8 @@ loo_negative_binomial = loo(model_negative_binomial, chn_negative_binomial) comparison = loo_compare((; Poisson=loo_poisson, NegativeBinomial=loo_negative_binomial -) -) +)) +display(comparison) # Results #┌──────────────────┬──────────┬────────┬────────┐