 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "builder_name": "reddit",
  "citation": "\n@inproceedings{volske-etal-2017-tl,\n    title = {TL;DR: Mining {R}eddit to Learn Automatic Summarization},\n    author = {V{\\\"o}lske, Michael  and Potthast, Martin  and Syed, Shahbaz  and Stein, Benno},\n    booktitle = {Proceedings of the Workshop on New Frontiers in Summarization},\n    month = {sep},\n    year = {2017},\n    address = {Copenhagen, Denmark},\n    publisher = {Association for Computational Linguistics},\n    url = {https://www.aclweb.org/anthology/W17-4508},\n    doi = {10.18653/v1/W17-4508},\n    pages = {59--63},\n    abstract = {Recent advances in automatic text summarization have used deep neural networks to generate high-quality abstractive summaries, but the performance of these models strongly depends on large amounts of suitable training data. We propose a new method for mining social media for author-provided summaries, taking advantage of the common practice of appending a {``}TL;DR{''} to long posts. A case study using a large Reddit crawl yields the Webis-TLDR-17 dataset, complementing existing corpora primarily from the news genre. Our technique is likely applicable to other social media sites and general web crawls.},\n}\n",
  "config_name": "default",
  "dataset_size": 18936213573,
  "description": "\nThis corpus contains preprocessed posts from the Reddit dataset.\nThe dataset consists of 3,848,330 posts with an average length of 270 words for content,\nand 28 words for the summary.\n\nFeatures includes strings: author, body, normalizedBody, content, summary, subreddit, subreddit_id.\nContent is used as document and summary is used as summary.\n",
  "download_checksums": {
    "https://zenodo.org/record/1043504/files/corpus-webis-tldr-17.zip?download=1": {
      "num_bytes": 3141854161,
      "checksum": "c1a0f8c4374c7314d3c9ec50dd505303c536062d87037d4dca7035b89b36938a"
    }
  },
  "download_size": 3141854161,
  "features": {
    "content": {
      "dtype": "string",
      "id": null,
      "_type": "Value"
    },
    "summary": {
      "dtype": "string",
      "id": null,
      "_type": "Value"
    }
  },
  "homepage": "https://github.com/webis-de/webis-tldr-17-corpus",
  "license": "",
  "post_processed": null,
  "post_processing_size": null,
  "size_in_bytes": 22078067734,
  "splits": {
    "train": {
      "name": "train",
      "num_bytes": 18936213573,
      "num_examples": 3848330,
      "dataset_name": "reddit"
    }
  },
  "supervised_keys": null,
  "task_templates": null,
  "version": {
    "version_str": "1.0.0",
    "description": null,
    "major": 1,
    "minor": 0,
    "patch": 0
  }
}