@inproceedings{Scholman2017,
  title     = {Crowdsourcing discourse interpretations: On the influence of context and the reliability of a connective insertion task},
  author    = {Merel Scholman and Vera Demberg},
  url       = {https://aclanthology.org/W17-0803},
  doi       = {10.18653/v1/W17-0803},
  year      = {2017},
  booktitle = {Proceedings of the 11th Linguistic Annotation Workshop},
  pages     = {24--33},
  publisher = {Association for Computational Linguistics},
  address   = {Valencia, Spain},
  abstract  = {Traditional discourse annotation tasks are considered costly and time-consuming, and the reliability and validity of these tasks are in question. In this paper, we investigate whether crowdsourcing can be used to obtain reliable discourse relation annotations. We also examine the influence of context on the reliability of the data. The results of a crowdsourced connective insertion task showed that the method can be used to obtain reliable annotations: the majority of the inserted connectives converged with the original label. Further, the method is sensitive to the fact that multiple senses can often be inferred for a single relation. Regarding the presence of context, the results show no significant difference in the distributions of insertions between conditions overall. However, a by-item comparison revealed several characteristics of segments that determine whether the presence of context makes a difference in annotations. The findings discussed in this paper can be taken as evidence that crowdsourcing can be used as a valuable method to obtain insights into the sense(s) of relations.},
  pubstate  = {published}
}