Add dataset card and python file
Browse files- dataset-financial-documents-2.py +87 -0
- dataset_infos.json +1 -0
dataset-financial-documents-2.py
ADDED
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# coding=utf-8
|
2 |
+
# Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
|
3 |
+
#
|
4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
5 |
+
# you may not use this file except in compliance with the License.
|
6 |
+
# You may obtain a copy of the License at
|
7 |
+
#
|
8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
9 |
+
#
|
10 |
+
# Unless required by applicable law or agreed to in writing, software
|
11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
13 |
+
# See the License for the specific language governing permissions and
|
14 |
+
# limitations under the License.
|
15 |
+
|
16 |
+
# Lint as: python3
"""Financial-documents summarization dataset loading script.

Pairs of (document, summary) lines loaded from parallel ``*.src.cleaned``
and ``*.tgt`` files hosted on the Hugging Face Hub.
"""
18 |
+
|
19 |
+
import datasets
|
20 |
+
|
21 |
+
_LICENSE = "For non-commercial research and educational purposes only"

_DESCRIPTION = """
Financial documents
"""

# NOTE: raw files must be fetched through the ``/resolve/`` endpoint.
# ``/blob/`` URLs return the hub's HTML file-viewer page, not the file
# contents, so ``dl_manager.download`` would store HTML instead of data.
_REPO = "https://huggingface.co/datasets/searde/dataset-financial-documents-2/resolve/main/data"

# Per-split [source, target] file pairs; index 0 is the document side,
# index 1 the summary side (see ``_split_generators``).
_URLs = {
    "train": [
        f"{_REPO}/train.src.cleaned",
        f"{_REPO}/train.tgt",
    ],
    "val": [
        f"{_REPO}/val.src.cleaned",
        f"{_REPO}/val.tgt",
    ],
    "test": [
        f"{_REPO}/test.src.cleaned",
        f"{_REPO}/test.tgt",
    ],
}

# Feature/column names shared by ``_info`` and ``_generate_examples``.
_DOCUMENT = "document"
_SUMMARY = "summary"
|
45 |
+
|
46 |
+
|
47 |
+
class DatasetFinancialDocuments(datasets.GeneratorBasedBuilder):
    """Builder yielding (document, summary) string pairs for summarization."""

    VERSION = datasets.Version("1.0.0")

    def _info(self):
        """Describe the two string features, supervised keys and license."""
        feature_spec = {
            _DOCUMENT: datasets.Value("string"),
            _SUMMARY: datasets.Value("string"),
        }
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(feature_spec),
            supervised_keys=(_DOCUMENT, _SUMMARY),
            license=_LICENSE,
        )

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators."""
        downloaded = dl_manager.download(_URLs)
        # Map each _URLs key to its datasets.Split; each entry in
        # ``downloaded`` is a [source_path, target_path] pair.
        split_for_key = {
            "train": datasets.Split.TRAIN,
            "val": datasets.Split.VALIDATION,
            "test": datasets.Split.TEST,
        }
        return [
            datasets.SplitGenerator(
                name=split,
                gen_kwargs={
                    "src_file": downloaded[key][0],
                    "tgt_file": downloaded[key][1],
                },
            )
            for key, split in split_for_key.items()
        ]

    def _generate_examples(self, src_file, tgt_file):
        """Yields examples."""
        with open(src_file, encoding="utf-8") as src, open(tgt_file, encoding="utf-8") as tgt:
            for idx, (doc_line, sum_line) in enumerate(zip(src, tgt)):
                # Each line is one example; the source files encode embedded
                # newlines as the literal token "NEWLINE_CHAR". Restore real
                # "\n" characters so no special vocab entry is needed.
                yield idx, {
                    _DOCUMENT: doc_line.strip().replace("NEWLINE_CHAR", "\n"),
                    _SUMMARY: sum_line.strip(),
                }
|
dataset_infos.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"default": {"description": "\nFinancial documents\n", "license": "For non-commercial research and educational purposes only", "features": {"document": {"dtype": "string", "id": null, "_type": "Value"}, "summary": {"dtype": "string", "id": null, "_type": "Value"}}, "post_processed": null, "supervised_keys": {"input": "document", "output": "summary"}, "task_templates": null, "builder_name": "dataset-financial-documents-2", "config_name": "default", "version": {"version_str": "1.0.0", "description": null, "major": 1, "minor": 0, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 558392265, "num_examples": 44972, "dataset_name": "dataset-financial-documents-2"}, "validation": {"name": "validation", "num_bytes": 68272432, "num_examples": 5622, "dataset_name": "dataset-financial-documents-2"}, "test": {"name": "test", "num_bytes": 70032124, "num_examples": 5622, "dataset_name": "dataset-financial-documents-2"}}, "download_checksums": {"https://huggingface.co/datasets/searde/dataset-financial-documents-2/blob/main/data/train.src.cleaned": {"num_bytes": 547512283, "checksum": "627781c8ce55d528fcdacd495db45583a915e2d24b7983b0a5a6693ede933bb1"}, "https://huggingface.co/datasets/searde/dataset-financial-documents-2/blob/main/data/train.tgt": {"num_bytes": 58793912, "checksum": "e9e82b8f413b0f1ed4eb7c883f93bb744f829c218c1608b6ba7615d687d07121"}, "https://huggingface.co/datasets/searde/dataset-financial-documents-2/blob/main/data/val.src.cleaned": {"num_bytes": 66875522, "checksum": "f0a43902da366eea2b882e39ddd4c0975ad44aba6b61095a2ea90362e9e2bb65"}, "https://huggingface.co/datasets/searde/dataset-financial-documents-2/blob/main/data/val.tgt": {"num_bytes": 7295302, "checksum": "bb08a078e0cb2b8ca9cc0fe3bfbe9d4098dee706bd00eb97449155e41b880157"}, "https://huggingface.co/datasets/searde/dataset-financial-documents-2/blob/main/data/test.src.cleaned": {"num_bytes": 68999509, "checksum": "138d3ac2dc899cbcd2e3745aaa94d1c1db55fb7058d9df4ba3ef2dac05a3a186"}, 
"https://huggingface.co/datasets/searde/dataset-financial-documents-2/blob/main/data/test.tgt": {"num_bytes": 7309099, "checksum": "fa97cf91a62ae82a0af6da88f2ddf8e06eb4e3b90f7971d8e0c516436518fae3"}}, "download_size": 756785627, "post_processing_size": null, "dataset_size": 696696821, "size_in_bytes": 1453482448}}
|