@inproceedings{joachims2005accurately,
  abstract = {This paper examines the reliability of implicit feedback generated from clickthrough data in WWW search. Analyzing the users' decision process using eyetracking and comparing implicit feedback against manual relevance judgments, we conclude that clicks are informative but biased. While this makes the interpretation of clicks as absolute relevance judgments difficult, we show that relative preferences derived from clicks are reasonably accurate on average.},
  acmid = {1076063},
  address = {New York, NY, USA},
  author = {Joachims, Thorsten and Granka, Laura and Pan, Bing and Hembrooke, Helene and Gay, Geri},
  booktitle = {Proceedings of the 28th Annual International ACM SIGIR Conference on Research and Development in Information Retrieval},
  doi = {10.1145/1076034.1076063},
  interhash = {050982b76855a6b1258ed0b40cb69018},
  intrahash = {8c488477626fa59db419ac77f3552029},
  isbn = {1-59593-034-5},
  location = {Salvador, Brazil},
  numpages = {8},
  pages = {154--161},
  publisher = {ACM},
  title = {Accurately interpreting clickthrough data as implicit feedback},
  url = {http://doi.acm.org/10.1145/1076034.1076063},
  year = 2005
}

@article{borrego2012measuring,
  abstract = {This paper explores the possibility of using data from social bookmarking services to measure the use of information by academic researchers. Social bookmarking data can be used to augment participative methods (e.g. interviews and surveys) and other, non-participative methods (e.g. citation analysis and transaction logs) to measure the use of scholarly information. We use BibSonomy, a free resource-sharing system, as a case study. Results show that published journal articles are by far the most popular type of source bookmarked, followed by conference proceedings and books. Commercial journal publisher platforms are the most popular type of information resource bookmarked, followed by websites, records in databases and digital repositories. Usage of open access information resources is low in comparison with toll access journals. In the case of open access repositories, there is a marked preference for the use of subject-based repositories over institutional repositories. The results are consistent with those observed in related studies based on surveys and citation analysis, confirming the possible use of bookmarking data in studies of information behaviour in academic settings. The main advantages of using social bookmarking data are that it is an unobtrusive approach, it captures the reading habits of researchers who are not necessarily authors, and data are readily available. The main limitation is that a significant amount of human resources is required in cleaning and standardizing the data.},
  author = {Borrego, Ángel and Fry, Jenny},
  doi = {10.1177/0165551512438353},
  eprint = {http://jis.sagepub.com/content/38/3/297.full.pdf+html},
  interhash = {71ddfdd5b3d99b1a2986b4ded5e02b3c},
  intrahash = {e5ccbb3378eeb88e7288d8ce59539812},
  journal = {Journal of Information Science},
  number = 3,
  pages = {297--308},
  title = {Measuring researchers' use of scholarly information through social bookmarking data: A case study of BibSonomy},
  url = {http://jis.sagepub.com/content/38/3/297.abstract},
  volume = 38,
  year = 2012
}

@inproceedings{mcnee2006stupid,
  abstract = {If recommenders are to help people be more productive, they need to support a wide variety of real-world information seeking tasks, such as those found when seeking research papers in a digital library. There are many potential pitfalls, including not knowing what tasks to support, generating recommendations for the wrong task, or even failing to generate any meaningful recommendations whatsoever. We posit that different recommender algorithms are better suited to certain information seeking tasks. In this work, we perform a detailed user study with over 130 users to understand these differences between recommender algorithms through an online survey of paper recommendations from the ACM Digital Library. We found that pitfalls are hard to avoid. Two of our algorithms generated 'atypical' recommendations: recommendations that were unrelated to their input baskets. Users reacted accordingly, providing strong negative results for these algorithms. Results from our 'typical' algorithms show some qualitative differences, but since users were exposed to two algorithms, the results may be biased. We present a wide variety of results, teasing out differences between algorithms. Finally, we succinctly summarize our most striking results as "Don't Look Stupid" in front of users.},
  acmid = {1180903},
  address = {New York, NY, USA},
  author = {McNee, Sean M. and Kapoor, Nishikant and Konstan, Joseph A.},
  booktitle = {Proceedings of the 2006 20th Anniversary Conference on Computer Supported Cooperative Work},
  doi = {10.1145/1180875.1180903},
  interhash = {24be686d042a3a4a710d9ff22dee0f2e},
  intrahash = {7775150ca225770019bd94db9be5db40},
  isbn = {1-59593-249-6},
  location = {Banff, Alberta, Canada},
  numpages = {10},
  pages = {171--180},
  publisher = {ACM},
  series = {CSCW '06},
  title = {Don't look stupid: avoiding pitfalls when recommending research papers},
  url = {http://doi.acm.org/10.1145/1180875.1180903},
  year = 2006
}
@misc{graaf2007academic,
  abstract = {A trial with academic social referencing software – also called social bookmarking software – has been carried out with members of the Research Group Systems- and Network Engineering (SNE) of the University of Amsterdam. The idea for a user trial started after Marten Hoekstra of the SNE group contacted the University Library of the University of Amsterdam for advice and recommendations on using one of the academic social referencing tools. The University Library recognized the value of the social bookmarking site Del.icio.us, but concluded that this was not optimal for academic work. Three other academic social referencing software tools were identified and analysed. Possible advantages of these academic social referencing tools for academics are listed in the textbox below. In order to assess the potential value of Web 2.0 applications for library services, and specifically academic social referencing tools, a user trial was set up by the University Library with the members of the SNE group. Pleiade Management and Consultancy was asked to document and report the feedback from the users during the trial. The user trial was supervised by Driek van Heesakkers of the University Library and Marten Hoekstra of the SNE group.},
  author = {van der Graaf, Maurits},
  interhash = {7703398a902589b222b1abc89d3d7271},
  intrahash = {2d219a1b0e0b318f457f49e25be7f4e5},
  month = mar,
  note = {User study},
  title = {Academic Social Referencing tools: a user trial with BibSonomy and Cite-U-Like organized by the Library of the University of Amsterdam},
  url = {http://cf.uba.uva.nl/nl/projecten/academic_social_referencing.pdf},
  year = 2007
}

@article{regulski07aufwand,
  abstract = {Authors of scientific articles have numerous options to search for background material for their research projects. With our article, we want to show that the use of social bookmarking services as part of the web 2.0 (O’Reilly, 2005) / library 2.0 (Danowski, 2006) technology is a useful supplement to conventional reference databases.},
  author = {Regulski, Katharina},
  interhash = {21d0acb377c730344d85253cbc8025b9},
  intrahash = {332e70ac149aa15ed12638a55ab1bf52},
  issn = {0341-4183},
  journal = {Bibliothek. Forschung und Praxis},
  number = 2,
  pages = {177--184},
  title = {Aufwand und Nutzen beim Einsatz von Social-Bookmarking-Services als Nachweisinstrument für wissenschaftliche Forschungsartikel am Beispiel von BibSonomy},
  url = {http://www.bibliothek-saur.de/2007_2/177-184.pdf},
  volume = 31,
  year = 2007
}