@techreport{wang2008lda,
  author = {Wang, Yi},
  title  = {Distributed {G}ibbs Sampling of Latent Topic Models: The Gritty Details},
  year   = 2008
}

@article{an1984stochastic,
  author  = {Geman, Stuart and Geman, Donald},
  title   = {Stochastic relaxation, {G}ibbs distributions, and the {B}ayesian restoration of images},
  journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
  volume  = 6,
  pages   = {721--741},
  year    = 1984
}

@techreport{griffiths02,
  author      = {Griffiths, Thomas L.},
  title       = {{G}ibbs sampling in the generative model of {L}atent {D}irichlet {A}llocation},
  institution = {Stanford University},
  url         = {http://www-psych.stanford.edu/~gruffydd/cogsci02/lda.ps},
  year        = 2002
}

@misc{walsh2004,
  author       = {Walsh, B.},
  title        = {Markov Chain Monte Carlo and {G}ibbs Sampling},
  howpublished = {Lecture Notes for EEB 581, version 26},
  month        = {April},
  year         = 2004,
  url          = {http://nitro.biosci.arizona.edu/courses/EEB581-2004/handouts/Gibbs.pdf},
  abstract     = {A major limitation towards more widespread implementation of Bayesian approaches is that obtaining the posterior distribution often requires the integration of high-dimensional functions. This can be computationally very difficult, but several approaches short of direct integration have been proposed (reviewed by Smith 1991, Evans and Swartz 1995, Tanner 1996). We focus here on Markov Chain Monte Carlo (MCMC) methods, which attempt to simulate direct draws from some complex distribution of interest. MCMC approaches are so named because one uses the previous sample values to randomly generate the next sample value, generating a Markov chain (as the transition probabilities between sample values are only a function of the most recent sample value). The realization in the early 1990s (Gelfand and Smith 1990) that one particular MCMC method, the Gibbs sampler, is very widely applicable to a broad class of Bayesian problems has sparked a major increase in the application of Bayesian analysis, and this interest is likely to continue expanding for some time to come. MCMC methods have their roots in the Metropolis algorithm (Metropolis and Ulam 1949, Metropolis et al. 1953), an attempt by physicists to compute complex integrals by expressing them as expectations for some distribution and then estimating this expectation by drawing samples from that distribution. The Gibbs sampler (Geman and Geman 1984) has its origins in image processing. It is thus somewhat ironic that the powerful machinery of MCMC methods had essentially no impact on the field of statistics until rather recently. Excellent (and detailed) treatments of MCMC methods are found in Tanner (1996) and Chapter two of Draper (2000).
Additional references are given in the particular sections below.}
}

@article{Tierney94,
  author  = {Tierney, L.},
  title   = {Markov chains for exploring posterior distributions},
  journal = {The Annals of Statistics},
  volume  = 22,
  number  = 4,
  pages   = {1701--1727},
  year    = 1994
}

@article{casella1992,
  author    = {Casella, George and George, Edward I.},
  title     = {Explaining the {G}ibbs Sampler},
  journal   = {The American Statistician},
  volume    = 46,
  number    = 3,
  pages     = {167--174},
  year      = 1992,
  publisher = {American Statistical Association},
  issn      = {00031305},
  doi       = {10.2307/2685208},
  url       = {http://www.jstor.org/stable/2685208},
  abstract  = {Computer-intensive algorithms, such as the Gibbs sampler, have become increasingly popular statistical tools, both in applied and theoretical work. The properties of such algorithms, however, may sometimes not be obvious. Here we give a simple explanation of how and why the Gibbs sampler works. We analytically establish its properties in a simple case and provide insight for more complicated cases. There are also a number of examples.}
}

@article{neal2000markov,
  author    = {Neal, Radford M.},
  title     = {Markov chain sampling methods for {D}irichlet process mixture models},
  journal   = {Journal of Computational and Graphical Statistics},
  volume    = 9,
  number    = 2,
  pages     = {249--265},
  year      = 2000,
  publisher = {American Statistical Association, Institute of Mathematical Statistics, and Interface Foundation of North America}
}

@article{mccallum2007topic,
  author  = {McCallum, A. and Wang, X. and Corrada-Emmanuel, A.},
  title   = {Topic and role discovery in social networks with experiments on {E}nron and academic email},
  journal = {Journal of Artificial Intelligence Research},
  volume  = 30,
  pages   = {249--272},
  year    = 2007
}

@article{casella_robert96,
  author  = {Casella, George and Robert, Christian P.},
  title   = {{R}ao-{B}lackwellisation of Sampling Schemes},
  journal = {Biometrika},
  volume  = 83,
  number  = 1,
  pages   = {81--94},
  month   = {March},
  year    = 1996
}
@article{griffiths_steyvers04,
  author  = {Griffiths, T. L. and Steyvers, M.},
  title   = {Finding scientific topics},
  journal = {Proceedings of the National Academy of Sciences},
  volume  = 101,
  number  = {Suppl. 1},
  pages   = {5228--5235},
  month   = {April},
  year    = 2004
}

@inproceedings{porteous_08,
  author    = {Porteous, Ian and Newman, David and Ihler, Alexander and Asuncion, Arthur and Smyth, Padhraic and Welling, Max},
  title     = {Fast collapsed {G}ibbs sampling for latent {D}irichlet allocation},
  booktitle = {KDD '08: Proceedings of the 14th ACM SIGKDD International Conference on Knowledge Discovery and Data Mining},
  location  = {Las Vegas, Nevada, USA},
  address   = {New York, NY, USA},
  publisher = {ACM},
  pages     = {569--577},
  isbn      = {978-1-60558-193-4},
  doi       = {10.1145/1401890.1401960},
  year      = 2008
}