@inproceedings{barilan2012beyond,
  abstract = {Traditionally, scholarly impact and visibility have been measured by counting publications and citations in the scholarly literature. However, increasingly scholars are also visible on the Web, establishing presences in a growing variety of social ecosystems. But how wide and established is this presence, and how do measures of social Web impact relate to their more traditional counterparts? To answer this, we sampled 57 presenters from the 2010 Leiden STI Conference, gathering publication and citations counts as well as data from the presenters' Web "footprints." We found Web presence widespread and diverse: 84% of scholars had homepages, 70% were on LinkedIn, 23% had public Google Scholar profiles, and 16% were on Twitter. For sampled scholars' publications, social reference manager bookmarks were compared to Scopus and Web of Science citations; we found that Mendeley covers more than 80% of sampled articles, and that Mendeley bookmarks are significantly correlated (r=.45) to Scopus citation counts.},
  author = {Bar-Ilan, Judit and Haustein, Stefanie and Peters, Isabella and Priem, Jason and Shema, Hadas and Terliesner, Jens},
  booktitle = {Proceedings of the 17th International Conference on Science and Technology Indicators, Montréal: Science-Metrix and OST},
  editor = {Archambault, Éric and Gingras, Yves and Larivière, Vincent},
  interhash = {5c386f2bfcd8d2052d455c75efb1c727},
  intrahash = {42585cbc0a99d9e137f2a3d6cb0239e5},
  pages = {98--109},
  title = {Beyond citations: Scholars' visibility on the social Web},
  url = {http://2012.sticonference.org/Proceedings/vol1/Bar-Ilan_Beyond_98.pdf},
  volume = 1,
  year = 2012
}

@inproceedings{peters2011crowdsourcing,
  abstract = {Qualitative journal evaluation makes use of cumulated content descriptions of single articles. These can either be represented by author-generated keywords, professionally indexed subject headings, automatically extracted terms or by reader-generated tags as used in social bookmarking systems. It is assumed that particularly the users' view on article content differs significantly from the authors' or indexers' perspectives. To verify this assumption, title and abstract terms, author keywords, Inspec subject headings, KeyWords Plus™ and tags are compared by calculating the overlap between the respective datasets. Our approach includes extensive term preprocessing (i.e. stemming, spelling unifications) to gain a homogeneous term collection. When term overlap is calculated for every single document of the dataset, similarity values are low. Thus, the presented study confirms the assumption that the different types of keywords each reflect a different perspective of the articles' contents and that tags (cumulated across articles) can be used in journal evaluation to represent a reader-specific view on published content.},
  author = {Peters, Isabella and Haustein, Stefanie and Terliesner, Jens},
  booktitle = {ACM WebSci'11},
  interhash = {def78a2b12565187bcac0cf08089b7a1},
  intrahash = {8e03cf8d57f903da395c07e9a9125f08},
  month = {June},
  note = {WebSci Conference 2011},
  pages = {1--4},
  title = {Crowdsourcing in Article Evaluation},
  url = {http://journal.webscience.org/487/},
  year = 2011
}