% cvxr.bib
@book{BoydVandenberghe:2004,
author = {S. Boyd and L. Vandenberghe},
title = {Convex Optimization},
publisher = {Cambridge University Press},
year = {2004},
}
@book{ESL,
title = {The Elements of Statistical Learning},
author = {T. Hastie and R. Tibshirani and J. Friedman},
publisher = {Springer-Verlag},
year = {2001}
}
% CVX like systems
@misc{cvx,
author = {M. Grant and S. Boyd},
title = {{CVX}: {MATLAB} Software for Disciplined Convex Programming, version 2.1},
month = {March},
year = {2014},
howpublished = {\url{http://cvxr.com/cvx}}
}
@article{cvxpy,
Author = {S. Diamond and S. Boyd},
Title = {{CVXPY}: A {Python}-embedded modeling language for
convex optimization},
Journal = {Journal of Machine Learning Research},
Year = {2016},
Volume = {17},
Number = {83},
Pages = {1--5},
}
@inproceedings{cvxjl,
author = {M. Udell and K. Mohan and D. Zeng and J. Hong and S. Diamond and S. Boyd},
title = {Convex optimization in {J}ulia},
booktitle = {Proceedings of the Workshop for High Performance Technical Computing in Dynamic Languages},
year = 2014,
pages = {18--28},
location = {New Orleans},
}
@inproceedings{YALMIP,
author = {J. L{\"o}fberg},
title = {{YALMIP}: A Toolbox for Modeling and Optimization in {MATLAB}},
booktitle = {Proceedings of the {IEEE} International Symposium on Computer Aided Control Systems Design},
year = {2004},
month = {September},
pages = {284--289},
note = {\url{http://users.isy.liu.se/johanl/yalmip}}
}
@misc{cvxcanon,
author = {J. Miller and P. Quigley and J. Zhu},
title = {{CVX}canon: Automatic Canonicalization of Disciplined Convex Programs},
month = {June},
year = {2015},
howpublished = {\url{https://stanford.edu/class/ee364b/projects/2015projects/reports/miller_quigley_zhu_report.pdf}}
}
% Related R libraries
@article{glmnet,
Author = {J. Friedman and T. Hastie and R. Tibshirani},
Title = {Regularization Paths for Generalized Linear Models via Coordinate Descent},
Journal = {Journal of Statistical Software},
Year = {2010},
Month = {January},
Volume = {33},
Number = {1},
Pages = {1--22},
}
@article{optimx,
title = {Unifying Optimization Algorithms to Aid Software System Users: {optimx} for {R}},
author = {J. C. Nash and R. Varadhan},
journal = {Journal of Statistical Software},
year = {2011},
volume = {43},
number = {9},
pages = {1--14},
}
@article{ROI,
title = {{ROI}: The {R} Optimization Infrastructure Package},
author = {S. Theu{\ss}l and F. Schwendinger and K. Hornik},
journal = {Research Report Series / Department of Statistics and Mathematics},
year = {2017},
volume = {133},
note = {WU Vienna University of Economics and Business, Vienna}
}
@manual{nloptr,
title = {The {NLopt} Nonlinear-Optimization Package},
author = {S. G. Johnson},
year = {2008},
note = {\url{http://ab-initio.mit.edu/nlopt/}}
}
@article{Nash:2014,
title = {On Best Practice Optimization Methods in {R}},
author = {J. C. Nash},
journal = {Journal of Statistical Software},
year = {2014},
volume = {60},
number = {2},
pages = {1--14},
}
% Convexity detection
@incollection{GrantBoydYe:2006,
author = {M. Grant and S. Boyd and Y. Ye},
title = {Disciplined Convex Programming},
booktitle = {Global Optimization: From Theory to Implementation},
series = {Nonconvex Optimization and its Applications},
pages = {155--210},
publisher = {Springer-Verlag},
year = {2006},
editor = {L. Liberti and N. Maculan}
}
@article{DGP,
author = {A. Agrawal and S. Diamond and S. Boyd},
title = {Disciplined Geometric Programming},
journal = {Optimization Letters},
year = {2019},
volume = {13},
number = {5},
pages = {961--976}
}
% ECOS
@inproceedings{ECOS,
author = {A. Domahidi and E. Chu and S. Boyd},
booktitle = {Proceedings of the European Control Conference},
title = {{ECOS}: {A}n {SOCP} solver for embedded systems},
year = {2013},
pages = {3071--3076}
}
@inproceedings{QCML,
title = {Code generation for embedded second-order cone programming},
author = {E. Chu and N. Parikh and A. Domahidi and S. Boyd},
booktitle = {Proceedings of the European Control Conference},
pages = {1547--1552},
year = {2013}
}
% SCS
@article{SCS,
author = {B. O'Donoghue and E. Chu and N. Parikh and S. Boyd},
title = {Conic Optimization via Operator Splitting
and Homogeneous Self-Dual Embedding},
journal = {Journal of Optimization Theory and Applications},
pages = {1--27},
year = {2016}
}
@inproceedings{DCSP,
author = {A. Ali and Z. Kolter and S. Diamond and S. Boyd},
title = {Disciplined Convex Stochastic Programming: A New Framework for Stochastic Optimization},
booktitle = {Proceedings of the Conference on Uncertainty in Artificial Intelligence},
pages = {62--71},
year = {2015}
}
@incollection{MOSEK,
author = {E. D. Andersen and K. D. Andersen},
title = {The {MOSEK} Interior Point Optimizer for Linear Programming: An Implementation of the Homogeneous Algorithm},
booktitle = {High Performance Optimization},
pages = {197--232},
year = {2000},
publisher = {Springer-Verlag}
}
@manual{GUROBI,
author = {{Gurobi Optimization, Inc}},
title = {Gurobi Optimizer Reference Manual},
year = {2016},
note = {\url{http://www.gurobi.com}}
}
% Optimization algorithms
@book{Wright:1997,
author = {S. J. Wright},
title = {Primal-dual Interior-Point Methods},
year = {1997},
publisher = {SIAM}
}
@article{AndersenDahl:2011,
author = {M. Andersen and J. Dahl and Z. Liu and L. Vandenberghe},
title = {Interior-Point Methods for Large-Scale Cone Programming},
journal = {Optimization for Machine Learning},
volume = {5583},
year = {2011},
publisher = {MIT Press}
}
@article{SkajaaYe:2015,
author = {A. Skajaa and Y. Ye},
title = {A Homogeneous Interior-Point Algorithm for Nonsymmetric Convex Conic Optimization},
journal = {Mathematical Programming},
volume = {150},
number = {2},
pages = {391--422},
year = {2015},
publisher = {Springer-Verlag}
}
@article{ADMM,
author = {S. Boyd and N. Parikh and E. Chu and B. Peleato and J. Eckstein},
title = {Distributed Optimization and Statistical Learning via the Alternating Direction Method of Multipliers},
journal = {Foundations and Trends in Machine Learning},
volume = {3},
number = {1},
pages = {1--122},
year = {2011}
}
% Examples
@article{Huber:1964,
title = {Robust Estimation of a Location Parameter},
author = {P. J. Huber},
journal = {Annals of Mathematical Statistics},
volume = {35},
number = {1},
pages = {73--101},
year = {1964}
}
@book{quantile,
title = {Quantile Regression},
author = {R. Koenker},
publisher = {Cambridge University Press},
year = {2005}
}
@article{Tibshirani:1996,
title = {Regression Shrinkage and Selection via the {L}asso},
author = {R. Tibshirani},
journal = {Journal of the Royal Statistical Society B (Methodological)},
volume = {58},
number = {1},
pages = {267--288},
year = {1996}
}
@article{elasticnet,
title = {Regularization and Variable Selection via the Elastic Net},
author = {H. Zou and T. Hastie},
journal = {Journal of the Royal Statistical Society B (Methodological)},
volume = {67},
number = {2},
pages = {301--320},
year = {2005}
}
@article{Cox:1958,
title = {The Regression Analysis of Binary Sequences},
author = {D. R. Cox},
journal = {Journal of the Royal Statistical Society B (Methodological)},
volume = {20},
number = {2},
pages = {215--242},
year = {1958}
}
@article{YuanLin:2007,
title = {Model Selection and Estimation in the {G}aussian Graphical Model},
author = {M. Yuan and Y. Lin},
journal = {Biometrika},
volume = {94},
number = {1},
pages = {19--35},
year = {2007}
}
@article{Banerjee:2008,
Author = {O. Banerjee and L. {E}l {G}haoui and A. {d'A}spremont},
Journal = {Journal of Machine Learning Research},
Pages = {485--516},
Title = {Model Selection Through Sparse Maximum Likelihood
Estimation for Multivariate Gaussian or Binary Data},
Volume = {9},
Month = {March},
Year = {2008}
}
@article{spinvcov,
title = {Sparse Inverse Covariance Estimation with the Graphical Lasso},
author = {J. Friedman and T. Hastie and R. Tibshirani},
journal = {Biostatistics},
volume = {9},
number = {3},
pages = {432--441},
year = {2008}
}
@article{BoydHastie:2016,
title = {Saturating Splines and Feature Selection},
author = {N. Boyd and T. Hastie and S. Boyd and B. Recht and M. Jordan},
journal = {arXiv preprint arXiv:1609.06764},
year = {2016}
}
@article{mlelogcave,
title = {Maximum Likelihood Estimation of a Log-Concave Density and its Distribution Function: Basic Properties and Uniform Consistency},
author = {L. D{\"u}mbgen and K. Rufibach},
journal = {Bernoulli},
volume = {15},
number = {1},
pages = {40--68},
year = {2009}
}
@book{dirstd,
title = {Statistical Methods for Rates and Proportions},
author = {J. L. Fleiss and B. Levin and M. C. Paik},
publisher = {John Wiley \& Sons},
month = {September},
year = {2003}
}
@article{TibshiraniHoefling:2011,
title = {Nearly-Isotonic Regression},
author = {R. J. Tibshirani and H. Hoefling and R. Tibshirani},
journal = {Technometrics},
volume = {53},
number = {1},
pages = {54--61},
year = {2011}
}
@misc{worstrisk,
title = {The Worst-Case Risk of a Portfolio},
author = {M. Lobo and S. Boyd},
month = {September},
year = {2000},
howpublished = {\url{http://stanford.edu/~boyd/papers/pdf/risk_bnd.pdf}}
}
@article{catenary,
title = {Case Studies in Optimization: Catenary Problem},
author = {I. A. Griva and R. J. Vanderbei},
journal = {Optimization and Engineering},
volume = {6},
number = {4},
pages = {463--482},
year = {2005},
publisher = {Springer-Verlag}
}
@article{Markowitz:1952,
title = {Portfolio Selection},
author = {H. M. Markowitz},
journal = {Journal of Finance},
volume = {7},
number = {1},
pages = {77--91},
month = {March},
year = {1952}
}
@article{Roy:1952,
title = {Safety First and the Holding of Assets},
author = {A. D. Roy},
journal = {Econometrica},
volume = {20},
number = {3},
pages = {431--449},
month = {July},
year = {1952}
}
@article{LoboFazelBoyd:2007,
title = {Portfolio Optimization with Linear and Fixed Transaction Costs},
author = {M. S. Lobo and M. Fazel and S. Boyd},
journal = {Annals of Operations Research},
volume = {152},
number = {1},
pages = {341--365},
year = {2007}
}
@article{BoydBusseti:2017,
title = {Multi-Period Trading via Convex Optimization},
author = {S. Boyd and E. Busseti and S. Diamond and R. N. Kahn and K. Koh and P. Nystrup and J. Speth},
journal = {Foundations and Trends in Optimization},
year = {2017}
}
@article{kelly,
title = {A New Interpretation of Information Rate},
author = {J. L. Kelly},
journal = {Bell System Technical Journal},
volume = {35},
number = {4},
pages = {917--926},
month = {July},
year = {1956}
}
@article{RCK,
title = {Risk-Constrained {K}elly Gambling},
author = {E. Busseti and E. K. Ryu and S. Boyd},
journal = {Journal of Investing},
volume = {25},
number = {3},
pages = {118--134},
year = {2016}
}
@article{FMMC,
title = {Fastest Mixing {M}arkov Chain on a Graph},
author = {S. Boyd and P. Diaconis and L. Xiao},
journal = {SIAM Review},
volume = {46},
number = {4},
pages = {667--689},
year = {2004},
publisher = {SIAM}
}
@book{ShannonWeaver:1949,
title = {The Mathematical Theory of Communication},
author = {C. E. Shannon and W. Weaver},
publisher = {University of Illinois Press},
year = {1949}
}
@book{GelfandFomin:1963,
title = {Calculus of Variations},
author = {I. M. Gelfand and S. V. Fomin},
publisher = {Prentice-Hall},
year = {1963}
}
@book{Freedman:2009,
title = {Statistical Models: Theory and Practice},
author = {D. A. Freedman},
publisher = {Cambridge University Press},
year = {2009}
}
@article{isotone,
title = {Isotone Optimization in {R}: Pool-Adjacent-Violators Algorithm ({PAVA}) and Active Set Methods},
author = {J. {de Leeuw} and K. Hornik and P. Mair},
journal = {Journal of Statistical Software},
year = {2009},
volume = {32},
number = {5},
pages = {1--24},
}
@article{neariso,
title = {Nearly-Isotonic Regression},
author = {R. J. Tibshirani and H. Hoefling and R. Tibshirani},
journal = {Technometrics},
volume = {53},
number = {1},
pages = {54--61},
year = {2011},
publisher = {Taylor \& Francis},
}
@article{PotthoffRoy:1964,
title = {A Generalized Multivariate Analysis of Variance Model Useful Especially for Growth Curve Problems},
author = {R. F. Potthoff and S. N. Roy},
journal = {Biometrika},
volume = {51},
number = {3--4},
pages = {313--326},
year = {1964},
}
% Languages
@Manual{r:2018,
title = {R: A Language and Environment for Statistical Computing},
author = {{R Core Team}},
organization = {R Foundation for Statistical Computing},
address = {Vienna, Austria},
year = {2018},
url = {https://www.R-project.org/},
}
% Optimization Task View
@Misc{cran:opt,
author = {S. Theu{\ss}l and H. W. Borchers},
note = {Version~2018-09-08},
title = {{CRAN} Task View: Optimization and Mathematical Programming},
year = {2014},
url = {http://CRAN.R-project.org/view=Optimization}
}
@article{koenker:mizera:2014,
author = {R. Koenker and I. Mizera},
title = {Convex Optimization in {R}},
journal = {Journal of Statistical Software},
volume = {60},
number = {5},
year = {2014},
abstract = {Convex optimization now plays an essential role in many facets of statistics. We briefly survey some recent developments and describe some implementations of these methods in R . Applications of linear and quadratic programming are introduced including quantile regression, the Huber M-estimator and various penalized regression methods. Applications to additively separable convex problems subject to linear equality and inequality constraints such as nonparametric density estimation and maximum likelihood estimation of general nonparametric mixture models are described, as are several cone programming problems. We focus throughout primarily on implementations in the R environment that rely on solution methods linked to R, like MOSEK by the package Rmosek. Code is provided in R to illustrate several of these problems. Other applications are available in the R package REBayes, dealing with empirical Bayes estimation of nonparametric mixture models.},
issn = {1548-7660},
pages = {1--23},
doi = {10.18637/jss.v060.i05},
url = {https://www.jstatsoft.org/v060/i05}
}
@article{gentleman:geyer:1981,
author = {R. Gentleman and C. J. Geyer},
title = {Maximum Likelihood for Interval Censored Data: Consistency and Computation},
journal = {Biometrika},
volume = {81},
number = {3},
pages = {618--623},
year = {1994},
doi = {10.1093/biomet/81.3.618},
URL = {http://dx.doi.org/10.1093/biomet/81.3.618},
eprint = {/oup/backfile/content_public/journal/biomet/81/3/10.1093/biomet/81.3.618/2/81-3-618.pdf}
}
@Article{deumbgen:rufibach:2011,
title = {{logcondens}: Computations Related to Univariate Log-Concave Density Estimation},
author = {L. D{\"u}mbgen and K. Rufibach},
journal = {Journal of Statistical Software},
year = {2011},
volume = {39},
number = {6},
pages = {1--28},
url = {http://www.jstatsoft.org/v39/i06/},
}
@article{reductions:2018,
author = {A. Agrawal and R. Verschueren and S. Diamond and S. Boyd},
title = {A Rewriting System for Convex Optimization Problems},
journal = {Journal of Control and Decision},
volume = {5},
number = {1},
pages = {42--60},
year = {2018},
publisher = {Taylor \& Francis},
doi = {10.1080/23307706.2017.1397554},
URL = {https://doi.org/10.1080/23307706.2017.1397554},
eprint = {https://doi.org/10.1080/23307706.2017.1397554}
}
@Misc{credit,
author = {{Mathworks Inc}},
title = {Using Credit Scorecards with Constrained Logistic Regression Coefficients},
URL = {https://www.mathworks.com/help/finance/examples/credit-scorecards-with-constrained-logistic-regression-coefficients.html},
year = {2018}
}
@Article{rcpp,
title = {{Rcpp}: Seamless {R} and {C++} Integration},
author = {D. Eddelbuettel and R. Fran\c{c}ois},
journal = {Journal of Statistical Software},
year = {2011},
volume = {40},
number = {8},
pages = {1--18},
url = {http://www.jstatsoft.org/v40/i08/},
doi = {10.18637/jss.v040.i08},
}
@Manual{Rmosek,
title = {Rmosek: The {R} to {MOSEK} Optimization Interface},
author = {{MOSEK ApS}},
year = {2017},
note = {\url{http://rmosek.r-forge.r-project.org/}, \url{http://www.mosek.com/}},
}
@article{su-li-shi:2016,
author = {L. Su and Z. Shi and P. C. B. Phillips},
title = {Identifying Latent Structures in Panel Data},
journal = {Econometrica},
volume = {84},
number = {6},
pages = {2215--2264},
year = {2016},
keywords = {Classification, cluster analysis, dynamic panel, group Lasso, high dimensionality, nonlinear panel, oracle property, panel structure model, parameter heterogeneity, penalized least squares, penalized GMM, penalized profile likelihood},
doi = {10.3982/ECTA12560},
url = {https://onlinelibrary.wiley.com/doi/abs/10.3982/ECTA12560},
eprint = {https://onlinelibrary.wiley.com/doi/pdf/10.3982/ECTA12560},
abstract = {This paper provides a novel mechanism for identifying and estimating latent group structures in panel data using penalized techniques. We consider both linear and nonlinear models where the regression coefficients are heterogeneous across groups but homogeneous within a group and the group membership is unknown. Two approaches are considered—penalized profile likelihood (PPL) estimation for the general nonlinear models without endogenous regressors, and penalized GMM (PGMM) estimation for linear models with endogeneity. In both cases, we develop a new variant of Lasso called classifier-Lasso (C-Lasso) that serves to shrink individual coefficients to the unknown group-specific coefficients. C-Lasso achieves simultaneous classification and consistent estimation in a single step and the classification exhibits the desirable property of uniform consistency. For PPL estimation, C-Lasso also achieves the oracle property so that group-specific parameter estimators are asymptotically equivalent to infeasible estimators that use individual group identity information. For PGMM estimation, the oracle property of C-Lasso is preserved in some special cases. Simulations demonstrate good finite-sample performance of the approach in both classification and estimation. Empirical applications to both linear and nonlinear models are presented.}
}
@Article{zhentao:2018,
title = {Two Examples of Convex-Programming-Based High-Dimensional Econometric Estimators},
author = {Z. Gao and Z. Shi},
journal = {arXiv preprint arXiv:1806.10423v1},
url = {https://arxiv.org/abs/1806.10423v1},
year = {2018}
}
@misc{zhanshi:2018,
author = {Z. Gao},
title = {Two Examples of Convex-Programming-Based High-Dimensional Econometric Estimators},
year = {2018},
publisher = {GitHub},
journal = {GitHub repository},
howpublished = {\url{https://github.com/zhan-gao/convex_prog_in_econometrics}},
commit = {a2d202847ae72645a953b3cf6dc447b7784d29c9}
}
@Manual{bates:martin:2018,
title = {Matrix: Sparse and Dense Matrix Classes and Methods},
author = {D. Bates and M. Maechler},
year = {2018},
note = {R package version 1.2-14},
url = {https://CRAN.R-project.org/package=Matrix},
}
@Inbook{Davies:2016,
author = {G. Davies and J. Gillard and A. Zhigljavsky},
title = {Comparative Study of Different Penalty Functions and Algorithms in Survey Calibration},
bookTitle = {Advances in Stochastic and Deterministic Global Optimization},
year = {2016},
publisher = {Springer-Verlag},
address = {Cham},
pages = {87--127},
abstract = {The technique of calibration in survey sampling is a widely used technique in the field of official statistics. The main element of the calibration process is an optimization procedure, for which a variety of penalty functions can be used. In this chapter, we consider three of the classical penalty functions that are implemented in many of the standard calibration software packages. We present two algorithms used by many of these software packages, and explore the properties of the calibrated weights and the corresponding estimates when using these two algorithms with the classical calibration penalty functions.},
isbn = {978-3-319-29975-4},
doi = {10.1007/978-3-319-29975-4_6},
url = {https://doi.org/10.1007/978-3-319-29975-4_6}
}
@Misc{lumley:2018,
author = {T. Lumley},
year = {2018},
title = {{survey}: Analysis of Complex Survey Samples},
note = {R package version 3.34},
}
@book{Lumley:2010,
author = {T. S. Lumley},
title = {Complex Surveys: A Guide to Analysis Using R},
year = {2010},
isbn = {0470284307, 9780470284308},
publisher = {John Wiley \& Sons}
}
@Article{tibsjhf:2017,
author = {R. J. Tibshirani and J. H. Friedman},
title = {A Pliable Lasso},
year = {2017},
journal = {arXiv preprint arxiv:1712.00484},
url = {https://arxiv.org/abs/1712.00484}
}
@book{james2013,
author = {G. James and D. Witten and T. Hastie and R. Tibshirani},
title = {An Introduction to Statistical Learning: with Applications in {R}},
series = {Springer Texts in Statistics},
publisher = {Springer New York},
year = {2013},
isbn = {978-1-4614-7137-0},
doi = {10.1007/978-1-4614-7138-7},
url = {http://doi.org/10.1007/978-1-4614-7138-7}
}
@article{ryang:2018,
title = {Density Deconvolution with Additive Measurement Errors using Quadratic Programming},
author = {R. Yang and D. Apley and J. Staum and D. Ruppert},
journal = {arXiv preprint arXiv:1812.01786},
year = {2018}
}
@article{fu:naras:boyd:2019,
title = {{CVXR}: An {R} Package for Disciplined Convex Optimization},
author = {A. Fu and B. Narasimhan and S. Boyd},
journal = {Journal of Statistical Software (to appear)},
year = {2019}
}