{
  "alpaca_en": {
    "file_name": "alpaca_data_en_52k.json",
    "file_sha1": "607f94a7f581341e59685aef32f531095232cf23"
  },
  "alpaca_zh": {
    "file_name": "alpaca_data_zh_51k.json",
    "file_sha1": "e655af3db557a4197f7b0cf92e1986b08fae6311"
  },
  "alpaca_gpt4_en": {
    "file_name": "alpaca_gpt4_data_en.json",
    "file_sha1": "647f4ad447bd993e4b6b6223d1be15208bab694a"
  },
  "alpaca_gpt4_zh": {
    "file_name": "alpaca_gpt4_data_zh.json",
    "file_sha1": "3eaa3bda364ccdd59925d7448a698256c31ef845"
  },
  "self_cognition": {
    "file_name": "self_cognition.json",
    "file_sha1": "6287a730ada924fc5d9eadc6d8f865e01b7a6f67"
  },
  "oaast_sft": {
    "file_name": "oaast_sft.json",
    "file_sha1": "7baf5d43e67a91f9bbdf4e400dbe033b87e9757e",
    "columns": {
      "prompt": "instruction",
      "query": "input",
      "response": "output",
      "history": "history"
    }
  },
  "oaast_sft_zh": {
    "file_name": "oaast_sft_zh.json",
    "file_sha1": "a6a91f18f80f37b10ded9cf633fb50c033bf7b9f",
    "columns": {
      "prompt": "instruction",
      "query": "input",
      "response": "output",
      "history": "history"
    }
  },
  "lima": {
    "file_name": "lima.json",
    "file_sha1": "9db59f6b7007dc4b17529fc63379b9cd61640f37",
    "columns": {
      "prompt": "instruction",
      "query": "input",
      "response": "output",
      "history": "history"
    }
  },
  "example": {
    "script_url": "example_dataset",
    "columns": {
      "prompt": "instruction",
      "query": "input",
      "response": "output",
      "history": "history"
    }
  },
  "guanaco": {
    "hf_hub_url": "JosephusCheung/GuanacoDataset",
    "ms_hub_url": "AI-ModelScope/GuanacoDataset"
  },
  "belle_2m": {
    "hf_hub_url": "BelleGroup/train_2M_CN",
    "ms_hub_url": "AI-ModelScope/train_2M_CN"
  },
  "belle_1m": {
    "hf_hub_url": "BelleGroup/train_1M_CN",
    "ms_hub_url": "AI-ModelScope/train_1M_CN"
  },
  "belle_0.5m": {
    "hf_hub_url": "BelleGroup/train_0.5M_CN",
    "ms_hub_url": "AI-ModelScope/train_0.5M_CN"
  },
  "belle_dialog": {
    "hf_hub_url": "BelleGroup/generated_chat_0.4M",
    "ms_hub_url": "AI-ModelScope/generated_chat_0.4M"
  },
  "belle_math": {
    "hf_hub_url": "BelleGroup/school_math_0.25M",
    "ms_hub_url": "AI-ModelScope/school_math_0.25M"
  },
  "belle_multiturn": {
    "script_url": "belle_multiturn",
    "formatting": "sharegpt"
  },
  "ultra_chat": {
    "script_url": "ultra_chat",
    "formatting": "sharegpt"
  },
  "open_platypus": {
    "hf_hub_url": "garage-bAInd/Open-Platypus",
    "ms_hub_url": "AI-ModelScope/Open-Platypus"
  },
  "codealpaca": {
    "hf_hub_url": "sahil2801/CodeAlpaca-20k",
    "ms_hub_url": "AI-ModelScope/CodeAlpaca-20k"
  },
  "alpaca_cot": {
    "hf_hub_url": "QingyiSi/Alpaca-CoT",
    "ms_hub_url": "AI-ModelScope/Alpaca-CoT"
  },
  "openorca": {
    "hf_hub_url": "Open-Orca/OpenOrca",
    "ms_hub_url": "AI-ModelScope/OpenOrca",
    "columns": {
      "prompt": "question",
      "response": "response",
      "system": "system_prompt"
    }
  },
  "mathinstruct": {
    "hf_hub_url": "TIGER-Lab/MathInstruct",
    "ms_hub_url": "AI-ModelScope/MathInstruct",
    "columns": {
      "prompt": "instruction",
      "response": "output"
    }
  },
  "firefly": {
    "hf_hub_url": "YeungNLP/firefly-train-1.1M",
    "columns": {
      "prompt": "input",
      "response": "target"
    }
  },
  "webqa": {
    "hf_hub_url": "suolyer/webqa",
    "ms_hub_url": "AI-ModelScope/webqa",
    "columns": {
      "prompt": "input",
      "response": "output"
    }
  },
  "webnovel": {
    "hf_hub_url": "zxbsmk/webnovel_cn",
    "ms_hub_url": "AI-ModelScope/webnovel_cn"
  },
  "nectar_sft": {
    "hf_hub_url": "mlinmg/SFT-Nectar"
  },
  "adgen": {
    "hf_hub_url": "HasturOfficial/adgen",
    "ms_hub_url": "AI-ModelScope/adgen",
    "columns": {
      "prompt": "content",
      "response": "summary"
    }
  },
  "sharegpt_hyper": {
    "hf_hub_url": "totally-not-an-llm/sharegpt-hyperfiltered-3k",
    "formatting": "sharegpt"
  },
  "sharegpt4": {
    "hf_hub_url": "shibing624/sharegpt_gpt4",
    "ms_hub_url": "AI-ModelScope/sharegpt_gpt4",
    "formatting": "sharegpt"
  },
  "ultrachat_200k": {
    "hf_hub_url": "HuggingFaceH4/ultrachat_200k",
    "ms_hub_url": "AI-ModelScope/ultrachat_200k",
    "columns": {
      "messages": "messages",
      "role": "role",
      "content": "content"
    },
    "formatting": "sharegpt"
  },
  "agent_instruct": {
    "hf_hub_url": "THUDM/AgentInstruct",
    "ms_hub_url": "ZhipuAI/AgentInstruct",
    "formatting": "sharegpt"
  },
  "lmsys_chat": {
    "hf_hub_url": "lmsys/lmsys-chat-1m",
    "ms_hub_url": "AI-ModelScope/lmsys-chat-1m",
    "columns": {
      "messages": "conversation",
      "role": "role",
      "content": "content"
    },
    "formatting": "sharegpt"
  },
  "evol_instruct": {
    "hf_hub_url": "WizardLM/WizardLM_evol_instruct_V2_196k",
    "formatting": "sharegpt"
  },
  "hh_rlhf_en": {
    "script_url": "hh_rlhf_en",
    "columns": {
      "prompt": "instruction",
      "response": "output",
      "history": "history"
    },
    "ranking": true
  },
  "oaast_rm": {
    "file_name": "oaast_rm.json",
    "file_sha1": "622d420e9b70003b210618253bd3d9d2891d86cb",
    "columns": {
      "prompt": "instruction",
      "query": "input",
      "response": "output",
      "history": "history"
    },
    "ranking": true
  },
  "oaast_rm_zh": {
    "file_name": "oaast_rm_zh.json",
    "file_sha1": "1065af1f3784dd61be5e79713a35f427b713a232",
    "columns": {
      "prompt": "instruction",
      "query": "input",
      "response": "output",
      "history": "history"
    },
    "ranking": true
  },
  "comparison_gpt4_en": {
    "file_name": "comparison_gpt4_data_en.json",
    "file_sha1": "96fa18313544e22444fe20eead7754b17da452ae",
    "ranking": true
  },
  "comparison_gpt4_zh": {
    "file_name": "comparison_gpt4_data_zh.json",
    "file_sha1": "515b18ed497199131ddcc1af950345c11dc5c7fd",
    "ranking": true
  },
  "nectar_rm": {
    "hf_hub_url": "mlinmg/RLAIF-Nectar",
    "ranking": true
  },
  "wiki_demo": {
    "file_name": "wiki_demo.txt",
    "file_sha1": "e70375e28eda542a90c68213640cc371898ce181",
    "columns": {
      "prompt": "text"
    }
  },
  "c4_demo": {
    "file_name": "c4_demo.json",
    "file_sha1": "a5a0c86759732f9a5238e447fecd74f28a66cca8",
    "columns": {
      "prompt": "text"
    }
  },
  "refinedweb": {
    "hf_hub_url": "tiiuae/falcon-refinedweb",
    "columns": {
      "prompt": "content"
    }
  },
  "redpajama_v2": {
    "hf_hub_url": "togethercomputer/RedPajama-Data-V2",
    "columns": {
      "prompt": "raw_content"
    },
    "subset": "default"
  },
  "wikipedia_en": {
    "hf_hub_url": "olm/olm-wikipedia-20221220",
    "columns": {
      "prompt": "text"
    }
  },
  "wikipedia_zh": {
    "hf_hub_url": "pleisto/wikipedia-cn-20230720-filtered",
    "ms_hub_url": "AI-ModelScope/wikipedia-cn-20230720-filtered",
    "columns": {
      "prompt": "completion"
    }
  },
  "pile": {
    "hf_hub_url": "EleutherAI/pile",
    "columns": {
      "prompt": "text"
    },
    "subset": "all"
  },
  "skypile": {
    "hf_hub_url": "Skywork/SkyPile-150B",
    "columns": {
      "prompt": "text"
    }
  },
  "the_stack": {
    "hf_hub_url": "bigcode/the-stack",
    "ms_hub_url": "AI-ModelScope/the-stack",
    "columns": {
      "prompt": "content"
    }
  },
  "starcoder_python": {
    "hf_hub_url": "bigcode/starcoderdata",
    "ms_hub_url": "AI-ModelScope/starcoderdata",
    "columns": {
      "prompt": "content"
    },
    "folder": "python"
  }
}