@techreport{NBERw18567,
  author      = {Dai, Weijia and Jin, Ginger Z. and Lee, Jungmin and Luca, Michael},
  title       = {Optimal Aggregation of Consumer Ratings: An Application to {Yelp.com}},
  institution = {National Bureau of Economic Research},
  type        = {Working Paper},
  series      = {Working Paper Series},
  number      = {18567},
  year        = {2012},
  month       = nov,
  doi         = {10.3386/w18567},
  url         = {http://www.nber.org/papers/w18567},
  abstract    = {Consumer review websites leverage the wisdom of the crowd, with each product being reviewed many times (some with more than 1,000 reviews). Because of this, the way in which information is aggregated is a central decision faced by consumer review websites. Given a set of reviews, what is the optimal way to construct an average rating? We offer a structural approach to answering this question, allowing for (1) reviewers to vary in stringency and accuracy, (2) reviewers to be influenced by existing reviews, and (3) product quality to change over time.
Applying this approach to restaurant reviews from Yelp.com, we construct optimal ratings for all restaurants and compare them to the arithmetic averages displayed by Yelp. Depending on how we interpret the downward trend of reviews within a restaurant, we find 19.1-41.38\% of the simple average ratings are more than 0.15 stars away from optimal ratings, and 5.33-19.1\% are more than 0.25 stars away at the end of our sample period. Moreover, the deviation grows significantly as a restaurant accumulates reviews over time. This suggests that large gains could be made by implementing optimal ratings, especially as Yelp grows. Our algorithm can be flexibly applied to many different review settings.},
}