upload the first config

#4
by severo HF staff - opened

Created with

# Migrate a script-based dataset repository to "no-code" Parquet files via a PR:
#   1. Push the first config with create_pr=True, which opens a new pull request.
#   2. Push every remaining config onto that PR's ref (refs/pr/<n>).
#   3. Delete the loading script and the obsolete dataset_infos.json on the PR.
from datasets import load_dataset, get_dataset_config_names
from huggingface_hub import HfApi
from huggingface_hub.utils import EntryNotFoundError
namespace = "jmhessel"
dataset_name = "newyorker_caption_contest"
repo_id = f"{namespace}/{dataset_name}"
configs = get_dataset_config_names(repo_id)
# create a pull request with the first config
config = configs.pop(0)  # pop mutates `configs`: the loop below only sees the rest
dataset = load_dataset(repo_id, config)
# SECURITY NOTE: never commit a real token; prefer `huggingface-cli login` or an
# environment variable over pasting it into the script.
token = "hf_xxx" # <- replace with a token, with write role (https://huggingface.co./settings/tokens)
dataset.push_to_hub(repo_id, config_name=config, commit_message="upload the first config", create_pr=True, token=token)
# get the PR reference on https://huggingface.co./datasets/jmhessel/newyorker_caption_contest/discussions: refs/pr/4
revision = "refs/pr/xxx" # <- replace with the correct PR number
# upload the next configs, if any
for config in configs:
    dataset = load_dataset(repo_id, config)
    dataset.push_to_hub(repo_id, config_name=config, commit_message="upload the next config", revision=revision, token=token)
# delete the script and the obsolete dataset_infos.json file
# (reuse one HfApi client instead of constructing it twice)
api = HfApi(token=token)
try:
    api.delete_file(f"{dataset_name}.py", repo_id, repo_type="dataset", revision=revision, commit_message="Delete the script")
except EntryNotFoundError:
    print("No script to delete")
try:
    api.delete_file("dataset_infos.json", repo_id, repo_type="dataset", revision=revision, commit_message="Delete the obsolete dataset_infos.json file")
except EntryNotFoundError:
    print("No dataset_infos.json file to delete")

Hi @jmhessel. I prepared this pull request that replaces the dataset script with a "no-code" version of the dataset.

For reference, in the description, I pasted the Python code I used to prepare this PR.

Feel free to merge. You should get the Dataset Viewer back.

Woah!!! This is above and beyond @severo , thank you so much for your work!!

jmhessel changed pull request status to merged

Sign up or log in to comment