Commit "initial" — common_crawl.py (+4 −2)
@@ -75,7 +75,9 @@ class CommonCrawl(datasets.GeneratorBasedBuilder):
|
|
75 |
|
76 |
def _generate_examples(self, filepath):
|
77 |
logger.info("generating examples from = %s", filepath)
|
78 |
-
|
|
|
79 |
if record.rec_type == 'response' and record.http_headers.get_header('Content-Type') == 'text/html':
|
80 |
text = _decode_text(record.content_stream().read())
|
81 |
-
|
|
|
|
75 |
|
76 |
def _generate_examples(self, filepath):
    """Yield (key, example) pairs from a remote Common Crawl WARC/ARC archive.

    Streams the archive over HTTP (no full download), keeps only HTTP
    response records whose Content-Type is ``text/html``, and emits one
    example per page that decodes successfully.

    Args:
        filepath: download-manager style path of the form ``"<proto>::<url>"``;
            the part after ``"::"`` is the raw archive URL to stream.
            # assumes exactly one "::" separator — TODO confirm against caller

    Yields:
        ``(number, {"id": number, "text": text})`` — a unique, strictly
        increasing integer key per yielded example.
    """
    logger.info("generating examples from = %s", filepath)
    number = 0
    # Stream the response body; arc2warc=True lets warcio transparently
    # read legacy .arc archives as WARC records.
    stream = requests.get(filepath.split("::")[1], stream=True).raw
    for record in ArchiveIterator(stream, arc2warc=True):
        # Keep only actual HTML responses (skips request/metadata/warcinfo records).
        if record.rec_type == 'response' and record.http_headers.get_header('Content-Type') == 'text/html':
            text = _decode_text(record.content_stream().read())
            if text is not None:
                yield number, {"id": number, "text": text}
                # BUG FIX: the committed code never incremented `number`, so
                # every example was yielded with key/id 0 — duplicate keys and
                # useless ids. Increment after each successful yield.
                number += 1