{
  "alpaca_en": {
    "file_name": "alpaca_data_en_52k.json",
    "file_sha1": "607f94a7f581341e59685aef32f531095232cf23"
  },
  "alpaca_zh": {
    "file_name": "alpaca_data_zh_51k.json",
    "file_sha1": "e655af3db557a4197f7b0cf92e1986b08fae6311"
  },
  "alpaca_gpt4_en": {
    "file_name": "alpaca_gpt4_data_en.json",
    "file_sha1": "647f4ad447bd993e4b6b6223d1be15208bab694a"
  },
  "alpaca_gpt4_zh": {
    "file_name": "alpaca_gpt4_data_zh.json",
    "file_sha1": "3eaa3bda364ccdd59925d7448a698256c31ef845"
  },
  "self_cognition": {
    "file_name": "self_cognition.json",
    "file_sha1": "6287a730ada924fc5d9eadc6d8f865e01b7a6f67"
  },
  "oaast_sft": {
    "file_name": "oaast_sft.json",
    "file_sha1": "7baf5d43e67a91f9bbdf4e400dbe033b87e9757e",
    "columns": {
      "prompt": "instruction",
      "query": "input",
      "response": "output",
      "history": "history"
    }
  },
  "oaast_sft_zh": {
    "file_name": "oaast_sft_zh.json",
    "file_sha1": "a6a91f18f80f37b10ded9cf633fb50c033bf7b9f",
    "columns": {
      "prompt": "instruction",
      "query": "input",
      "response": "output",
      "history": "history"
    }
  },
  "lima": {
    "file_name": "lima.json",
    "file_sha1": "9db59f6b7007dc4b17529fc63379b9cd61640f37",
    "columns": {
      "prompt": "instruction",
      "query": "input",
      "response": "output",
      "history": "history"
    }
  },
  "example": {
    "script_url": "example_dataset",
    "columns": {
      "prompt": "instruction",
      "query": "input",
      "response": "output",
      "history": "history"
    }
  },
  "guanaco": {
    "hf_hub_url": "JosephusCheung/GuanacoDataset",
    "ms_hub_url": "AI-ModelScope/GuanacoDataset"
  },
  "belle_2m": {
    "hf_hub_url": "BelleGroup/train_2M_CN",
    "ms_hub_url": "AI-ModelScope/train_2M_CN"
  },
  "belle_1m": {
    "hf_hub_url": "BelleGroup/train_1M_CN",
    "ms_hub_url": "AI-ModelScope/train_1M_CN"
  },
  "belle_0.5m": {
    "hf_hub_url": "BelleGroup/train_0.5M_CN",
    "ms_hub_url": "AI-ModelScope/train_0.5M_CN"
  },
  "belle_dialog": {
    "hf_hub_url": "BelleGroup/generated_chat_0.4M",
    "ms_hub_url": "AI-ModelScope/generated_chat_0.4M"
  },
  "belle_math": {
    "hf_hub_url": "BelleGroup/school_math_0.25M",
    "ms_hub_url": "AI-ModelScope/school_math_0.25M"
  },
  "belle_multiturn": {
    "script_url": "belle_multiturn",
    "formatting": "sharegpt"
  },
  "ultra_chat": {
    "script_url": "ultra_chat",
    "formatting": "sharegpt"
  },
  "open_platypus": {
    "hf_hub_url": "garage-bAInd/Open-Platypus",
    "ms_hub_url": "AI-ModelScope/Open-Platypus"
  },
  "codealpaca": {
    "hf_hub_url": "sahil2801/CodeAlpaca-20k",
    "ms_hub_url": "AI-ModelScope/CodeAlpaca-20k"
  },
  "alpaca_cot": {
    "hf_hub_url": "QingyiSi/Alpaca-CoT",
    "ms_hub_url": "AI-ModelScope/Alpaca-CoT"
  },
  "openorca": {
    "hf_hub_url": "Open-Orca/OpenOrca",
    "ms_hub_url": "AI-ModelScope/OpenOrca",
    "columns": {
      "prompt": "question",
      "response": "response",
      "system": "system_prompt"
    }
  },
  "mathinstruct": {
    "hf_hub_url": "TIGER-Lab/MathInstruct",
    "ms_hub_url": "AI-ModelScope/MathInstruct",
    "columns": {
      "prompt": "instruction",
      "response": "output"
    }
  },
  "firefly": {
    "hf_hub_url": "YeungNLP/firefly-train-1.1M",
    "columns": {
      "prompt": "input",
      "response": "target"
    }
  },
  "webqa": {
    "hf_hub_url": "suolyer/webqa",
    "ms_hub_url": "AI-ModelScope/webqa",
    "columns": {
      "prompt": "input",
      "response": "output"
    }
  },
  "webnovel": {
    "hf_hub_url": "zxbsmk/webnovel_cn",
    "ms_hub_url": "AI-ModelScope/webnovel_cn"
  },
  "nectar_sft": {
    "hf_hub_url": "mlinmg/SFT-Nectar"
  },
  "adgen": {
    "hf_hub_url": "HasturOfficial/adgen",
    "ms_hub_url": "AI-ModelScope/adgen",
    "columns": {
      "prompt": "content",
      "response": "summary"
    }
  },
  "sharegpt_hyper": {
    "hf_hub_url": "totally-not-an-llm/sharegpt-hyperfiltered-3k",
    "formatting": "sharegpt"
  },
  "sharegpt4": {
    "hf_hub_url": "shibing624/sharegpt_gpt4",
    "ms_hub_url": "AI-ModelScope/sharegpt_gpt4",
    "formatting": "sharegpt"
  },
  "ultrachat_200k": {
    "hf_hub_url": "HuggingFaceH4/ultrachat_200k",
    "ms_hub_url": "AI-ModelScope/ultrachat_200k",
    "columns": {
      "messages": "messages",
      "role": "role",
      "content": "content"
    },
    "formatting": "sharegpt"
  },
  "agent_instruct": {
    "hf_hub_url": "THUDM/AgentInstruct",
    "ms_hub_url": "ZhipuAI/AgentInstruct",
    "formatting": "sharegpt"
  },
  "lmsys_chat": {
    "hf_hub_url": "lmsys/lmsys-chat-1m",
    "ms_hub_url": "AI-ModelScope/lmsys-chat-1m",
    "columns": {
      "messages": "conversation",
      "role": "role",
      "content": "content"
    },
    "formatting": "sharegpt"
  },
  "evol_instruct": {
    "hf_hub_url": "WizardLM/WizardLM_evol_instruct_V2_196k",
    "formatting": "sharegpt"
  },
  "hh_rlhf_en": {
    "script_url": "hh_rlhf_en",
    "columns": {
      "prompt": "instruction",
      "response": "output",
      "history": "history"
    },
    "ranking": true
  },
  "oaast_rm": {
    "file_name": "oaast_rm.json",
    "file_sha1": "622d420e9b70003b210618253bd3d9d2891d86cb",
    "columns": {
      "prompt": "instruction",
      "query": "input",
      "response": "output",
      "history": "history"
    },
    "ranking": true
  },
  "oaast_rm_zh": {
    "file_name": "oaast_rm_zh.json",
    "file_sha1": "1065af1f3784dd61be5e79713a35f427b713a232",
    "columns": {
      "prompt": "instruction",
      "query": "input",
      "response": "output",
      "history": "history"
    },
    "ranking": true
  },
  "comparison_gpt4_en": {
    "file_name": "comparison_gpt4_data_en.json",
    "file_sha1": "96fa18313544e22444fe20eead7754b17da452ae",
    "ranking": true
  },
  "comparison_gpt4_zh": {
    "file_name": "comparison_gpt4_data_zh.json",
    "file_sha1": "515b18ed497199131ddcc1af950345c11dc5c7fd",
    "ranking": true
  },
  "nectar_rm": {
    "hf_hub_url": "mlinmg/RLAIF-Nectar",
    "ranking": true
  },
  "wiki_demo": {
    "file_name": "wiki_demo.txt",
    "file_sha1": "e70375e28eda542a90c68213640cc371898ce181",
    "columns": {
      "prompt": "text"
    }
  },
  "refinedweb": {
    "hf_hub_url": "tiiuae/falcon-refinedweb",
    "columns": {
      "prompt": "content"
    }
  },
  "redpajama_v2": {
    "hf_hub_url": "togethercomputer/RedPajama-Data-V2",
    "columns": {
      "prompt": "raw_content"
    },
    "subset": "default"
  },
  "wikipedia_en": {
    "hf_hub_url": "olm/olm-wikipedia-20221220",
    "columns": {
      "prompt": "text"
    }
  },
  "wikipedia_zh": {
    "hf_hub_url": "pleisto/wikipedia-cn-20230720-filtered",
    "ms_hub_url": "AI-ModelScope/wikipedia-cn-20230720-filtered",
    "columns": {
      "prompt": "completion"
    }
  },
  "pile": {
    "hf_hub_url": "EleutherAI/pile",
    "columns": {
      "prompt": "text"
    },
    "subset": "all"
  },
  "skypile": {
    "hf_hub_url": "Skywork/SkyPile-150B",
    "columns": {
      "prompt": "text"
    }
  },
  "the_stack": {
    "hf_hub_url": "bigcode/the-stack",
    "ms_hub_url": "AI-ModelScope/the-stack",
    "columns": {
      "prompt": "content"
    }
  },
  "starcoder_python": {
    "hf_hub_url": "bigcode/starcoderdata",
    "ms_hub_url": "AI-ModelScope/starcoderdata",
    "columns": {
      "prompt": "content"
    },
    "folder": "python"
  }
}