# nle_hf_dataset / nle_hf_dataset.py
# Howuhh's picture
# Upload nle_hf_dataset.py
# 1e4e1c3
# raw
# history blame contribute delete
# No virus
# 5.19 kB
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Dungeons and Data: A Large-Scale NetHack Dataset. """
import glob
import h5py
import json
import os
import datasets
# BibTeX citation for the dataset (left empty in this script).
_CITATION = """\
"""
_DESCRIPTION = """\
3 billion state-action-score transitions from 100,000 trajectories collected from the symbolic bot winner of the NetHack Challenge 2021.
"""
_HOMEPAGE = ""
_LICENSE = ""
# Number of per-episode files hosted alongside this script.
# NOTE(review): range(1, _TOTAL_EPISODES) below enumerates files 1..5 only —
# confirm whether this constant should be 5 or the ranges should be
# range(1, _TOTAL_EPISODES + 1).
_TOTAL_EPISODES = 6
# Repo-relative paths to the per-episode payloads, keyed by config name:
# "data" -> raw trajectories (hdf5), "metadata" -> episode summaries (json).
_URLS = {
    "data": [f"data/{i}.hdf5" for i in range(1, _TOTAL_EPISODES)],
    "metadata": [f"metadata/{i}.json" for i in range(1, _TOTAL_EPISODES)],
}
class NleHfDataset(datasets.GeneratorBasedBuilder):
    """Dungeons and Data: A Large-Scale NetHack Dataset.

    Exposes two mutually exclusive configs over the same episodes:

    * ``"data"``     — full trajectories stored as one hdf5 file per episode.
    * ``"metadata"`` — one flat json record per episode (xlogfile-style fields).
    """

    VERSION = datasets.Version("1.0.0")

    BUILDER_CONFIGS = [
        datasets.BuilderConfig(name="data", version=VERSION, description="Data for all episodes"),
        datasets.BuilderConfig(name="metadata", version=VERSION, description="Metadata for all episodes"),
    ]
    # The lightweight config is the default so `load_dataset` without an
    # explicit config name does not pull the large hdf5 payloads.
    DEFAULT_CONFIG_NAME = "metadata"

    def _info(self):
        """Return the :class:`datasets.DatasetInfo` (feature schema) for the selected config."""
        if self.config.name == "metadata":
            # One record per episode, mirroring NetHack's end-of-game log fields.
            features = datasets.Features(
                {
                    "gameid": datasets.Value("int32"),
                    "version": datasets.Value("string"),
                    "points": datasets.Value("int32"),
                    "deathdnum": datasets.Value("int32"),
                    "deathlev": datasets.Value("int32"),
                    "maxlvl": datasets.Value("int32"),
                    "hp": datasets.Value("int32"),
                    "maxhp": datasets.Value("int32"),
                    "deaths": datasets.Value("int32"),
                    "deathdate": datasets.Value("int32"),
                    "birthdate": datasets.Value("int32"),
                    "uid": datasets.Value("int32"),
                    "role": datasets.Value("string"),
                    "race": datasets.Value("string"),
                    "gender": datasets.Value("string"),
                    "align": datasets.Value("string"),
                    "name": datasets.Value("string"),
                    "death": datasets.Value("string"),
                    "conduct": datasets.Value("string"),
                    "turns": datasets.Value("int32"),
                    "achieve": datasets.Value("string"),
                    "realtime": datasets.Value("int64"),
                    "starttime": datasets.Value("int64"),
                    "endtime": datasets.Value("int64"),
                    "gender0": datasets.Value("string"),
                    "align0": datasets.Value("string"),
                    "flags": datasets.Value("string"),
                }
            )
        else:
            # Full trajectories: the leading (time) dimension is variable-length,
            # hence `None`; 24x80 is the terminal screen size.
            features = datasets.Features(
                {
                    "tty_chars": datasets.Array3D(shape=(None, 24, 80), dtype="uint8"),
                    "tty_colors": datasets.Array3D(shape=(None, 24, 80), dtype="int8"),
                    "tty_cursor": datasets.Array2D(shape=(None, 2), dtype="int16"),
                    "actions": datasets.Sequence(datasets.Value("int16")),
                    "rewards": datasets.Sequence(datasets.Value("int32")),
                    "dones": datasets.Sequence(datasets.Value("bool")),
                }
            )
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download the per-episode files and expose them as a single train split."""
        if self.config.data_files is None:
            # Default: fetch the files bundled with this dataset repository.
            urls = _URLS[self.config.name]
        else:
            # The caller supplied explicit data_files (e.g. a subset of episodes).
            urls = self.config.data_files["train"]
        filepaths = [dl_manager.download(url) for url in urls]
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"filepaths": filepaths}
            )
        ]

    def _generate_examples(self, filepaths):
        """Yield ``(key, example)`` pairs — one example per downloaded file.

        The enumeration index serves as the (split-unique) example key.
        """
        for i, filepath in enumerate(filepaths):
            if self.config.name == "metadata":
                # FIX: pin the encoding so JSON parsing does not depend on the
                # platform's locale default (`open` without `encoding` does).
                with open(filepath, "r", encoding="utf-8") as f:
                    yield i, json.load(f)
            else:
                with h5py.File(filepath, "r") as f:
                    # `[()]` reads each hdf5 dataset fully into memory as a
                    # numpy array (one whole episode per file).
                    yield i, {
                        "tty_chars": f["tty_chars"][()],
                        "tty_colors": f["tty_colors"][()],
                        "tty_cursor": f["tty_cursor"][()],
                        "actions": f["actions"][()],
                        "rewards": f["rewards"][()],
                        "dones": f["dones"][()],
                    }