Add tool_format to override the tool formatter template

mMrBun 2024-06-22 02:00:13 +08:00
parent db9a1912e3
commit 20e2e6fdcb
4 changed files with 10 additions and 3 deletions

@@ -54,7 +54,7 @@ class HuggingfaceEngine(BaseEngine):
         self.tokenizer = tokenizer_module["tokenizer"]
         self.processor = tokenizer_module["processor"]
         self.tokenizer.padding_side = "left" if self.can_generate else "right"
-        self.template = get_template_and_fix_tokenizer(self.tokenizer, data_args.template)
+        self.template = get_template_and_fix_tokenizer(self.tokenizer, data_args.template, data_args.tool_format)
         self.model = load_model(
             self.tokenizer, model_args, finetuning_args, is_trainable=False, add_valuehead=(not self.can_generate)
         )  # must after fixing tokenizer to resize vocab

@@ -59,7 +59,7 @@ class VllmEngine(BaseEngine):
         self.tokenizer = tokenizer_module["tokenizer"]
         self.processor = tokenizer_module["processor"]
         self.tokenizer.padding_side = "left"
-        self.template = get_template_and_fix_tokenizer(self.tokenizer, data_args.template)
+        self.template = get_template_and_fix_tokenizer(self.tokenizer, data_args.template, data_args.tool_format)
         self.generating_args = generating_args.to_dict()

         engine_args = {
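
Both engine changes are the same one-line edit: data_args.tool_format is now forwarded as the new third argument of get_template_and_fix_tokenizer. A minimal sketch of the call (illustrative values only; with tool_format left at None the behaviour is unchanged):

    # hedged sketch, not part of the diff; "llama3" / "default" are hypothetical values
    template = get_template_and_fix_tokenizer(tokenizer, "llama3")             # before: tool_format implicitly None
    template = get_template_and_fix_tokenizer(tokenizer, "llama3", "default")  # after: override the tool formatter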

@@ -379,6 +379,7 @@ def _get_jinja_template(template: "Template", tokenizer: "PreTrainedTokenizer")
 def get_template_and_fix_tokenizer(
     tokenizer: "PreTrainedTokenizer",
     name: Optional[str] = None,
+    tool_format: Optional[str] = None,
 ) -> Template:
     if name is None:
         template = TEMPLATES["empty"]  # placeholder
@@ -386,6 +387,9 @@ def get_template_and_fix_tokenizer(
         template = TEMPLATES.get(name, None)
         if template is None:
             raise ValueError("Template {} does not exist.".format(name))
 
+    if tool_format:
+        template.format_tools = ToolFormatter(tool_format=tool_format)
+
     stop_words = template.stop_words
     if template.replace_eos:
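
The override itself is the new block above: when tool_format is set, the named ToolFormatter replaces whatever format_tools the registered template ships with. Roughly (hypothetical values):

    # sketch of the equivalence implied by the hunk above
    template = get_template_and_fix_tokenizer(tokenizer, name="glm4", tool_format="glm4")
    # behaves like
    template = get_template_and_fix_tokenizer(tokenizer, name="glm4")
    template.format_tools = ToolFormatter(tool_format="glm4")
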
@@ -660,7 +664,6 @@ _register_template(
     format_system=StringFormatter(slots=["<|system|>\n{{content}}"]),
     format_function=FunctionFormatter(slots=["{{name}}\n{{arguments}}"]),
     format_observation=StringFormatter(slots=["<|observation|>\n{{content}}<|assistant|>"]),
-    format_tools=ToolFormatter(tool_format="glm4"),
     format_prefix=EmptyFormatter(slots=["[gMASK]<sop>"]),
     stop_words=["<|user|>", "<|observation|>"],
     efficient_eos=True,
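
With the hard-coded format_tools removed from the glm4 registration, the template presumably falls back to whatever default tool formatter _register_template provides, so the glm4-specific tool format becomes opt-in:

    # assumption: glm4 tool formatting is now requested explicitly rather than baked into the template
    template = get_template_and_fix_tokenizer(tokenizer, name="glm4", tool_format="glm4")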

@@ -29,6 +29,10 @@ class DataArguments:
         default=None,
         metadata={"help": "Which template to use for constructing prompts in training and inference."},
     )
+    tool_format: Optional[str] = field(
+        default=None,
+        metadata={"help": "Specifies the tool format template for function calling."},
+    )
     dataset: Optional[str] = field(
         default=None,
         metadata={"help": "The name of provided dataset(s) to use. Use commas to separate multiple datasets."},