Skip to content

Update VI quickstart and VI LDA notebooks #228

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 4 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
99 changes: 99 additions & 0 deletions examples/references.bib
Original file line number Diff line number Diff line change
@@ -1,3 +1,33 @@
@article{blei2003lda,
  abstract  = {We describe latent Dirichlet allocation (LDA), a generative probabilistic model for collections of discrete data such as text corpora. LDA is a three-level hierarchical Bayesian model, in which each item of a collection is modeled as a finite mixture over an underlying set of topics. Each topic is, in turn, modeled as an infinite mixture over an underlying set of topic probabilities. In the context of text modeling, the topic probabilities provide an explicit representation of a document. We present efficient approximate inference techniques based on variational methods and an EM algorithm for empirical Bayes parameter estimation. We report results in document modeling, text classification, and collaborative filtering, comparing to a mixture of unigrams model and the probabilistic LSI model.},
  author    = {Blei, David M. and Ng, Andrew Y. and Jordan, Michael I.},
  doi       = {10.1162/jmlr.2003.3.4-5.993},
  issn      = {1532-4435},
  journal   = {Journal of Machine Learning Research},
  keywords  = {LDA allocation dirichlet latent},
  pages     = {993--1022},
  publisher = {JMLR.org},
  title     = {Latent {Dirichlet} Allocation},
  url       = {http://portal.acm.org/citation.cfm?id=944937},
  volume    = 3,
  year      = 2003
}

@misc{blundell2015weight,
  author        = {Charles Blundell and Julien Cornebise and Koray Kavukcuoglu and Daan Wierstra},
  title         = {Weight Uncertainty in Neural Networks},
  year          = {2015},
  eprint        = {1505.05424},
  archivePrefix = {arXiv},
  primaryClass  = {stat.ML}
}

@book{gelman2006data,
title={Data analysis using regression and multilevel/hierarchical models},
author={Gelman, Andrew and Hill, Jennifer},
Expand All @@ -16,6 +46,41 @@ @article{gelman2006multilevel
publisher={Taylor \& Francis}
}


@article{hoffman2013stochasticvi,
  author  = {Matthew D. Hoffman and David M. Blei and Chong Wang and John Paisley},
  title   = {Stochastic Variational Inference},
  journal = {Journal of Machine Learning Research},
  year    = {2013},
  volume  = {14},
  number  = {4},
  pages   = {1303--1347},
  url     = {http://jmlr.org/papers/v14/hoffman13a.html}
}


@misc{kingma2014autoencoding,
  author        = {Diederik P Kingma and Max Welling},
  title         = {Auto-Encoding Variational {Bayes}},
  year          = {2014},
  eprint        = {1312.6114},
  archivePrefix = {arXiv},
  primaryClass  = {stat.ML}
}


@article{kucukelbir2017advi,
  author  = {Alp Kucukelbir and Dustin Tran and Rajesh Ranganath and Andrew Gelman and David M. Blei},
  title   = {Automatic Differentiation Variational Inference},
  journal = {Journal of Machine Learning Research},
  year    = {2017},
  volume  = {18},
  number  = {14},
  pages   = {1--45},
  url     = {http://jmlr.org/papers/v18/16-107.html}
}


@book{mcelreath2018statistical,
title={Statistical rethinking: A Bayesian course with examples in R and Stan},
author={McElreath, Richard},
Expand All @@ -24,3 +89,37 @@ @book{mcelreath2018statistical
}


@misc{rezende2016variational,
  author        = {Danilo Jimenez Rezende and Shakir Mohamed},
  title         = {Variational Inference with Normalizing Flows},
  year          = {2016},
  eprint        = {1505.05770},
  archivePrefix = {arXiv},
  primaryClass  = {stat.ML}
}


@inproceedings{salimans2015mcmcandvi,
  author    = {Salimans, Tim and Kingma, Diederik and Welling, Max},
  title     = {Markov Chain Monte Carlo and Variational Inference: Bridging the Gap},
  booktitle = {Proceedings of the 32nd International Conference on Machine Learning},
  pages     = {1218--1226},
  year      = {2015},
  editor    = {Bach, Francis and Blei, David},
  volume    = {37},
  series    = {Proceedings of Machine Learning Research},
  address   = {Lille, France},
  month     = jul,
  publisher = {PMLR},
  pdf       = {http://proceedings.mlr.press/v37/salimans15.pdf},
  url       = {https://proceedings.mlr.press/v37/salimans15.html},
  abstract  = {Recent advances in stochastic gradient variational inference have made it possible to perform variational Bayesian inference with posterior approximations containing auxiliary random variables. This enables us to explore a new synthesis of variational inference and Monte Carlo methods where we incorporate one or more steps of MCMC into our variational approximation. By doing so we obtain a rich class of inference algorithms bridging the gap between variational methods and MCMC, and offering the best of both worlds: fast posterior approximation through the maximization of an explicit objective, with the option of trading off additional computation for additional accuracy. We describe the theoretical foundations that make this possible and show some promising first results.}
}


@misc{sklearn2020topicextraction,
  author       = {{Scikit-learn Developers}},
  title        = {Topic Extraction with {Non-negative Matrix Factorization} and {Latent Dirichlet Allocation}},
  howpublished = {\url{https://scikit-learn.org/stable/auto_examples/applications/plot_topics_extraction_with_nmf_lda.html}},
  note         = {Accessed: 2021-09-15}
}
Loading