[Fix] Fix a bug in internlm2 series configs (#977)

This commit is contained in:
Jingming 2024-03-15 15:21:35 +08:00 committed by GitHub
parent 7baa711fc7
commit c2d4717be2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 10 additions and 2 deletions

View File

@@ -31,5 +31,6 @@ models = [
         meta_template=_meta_template,
         run_cfg=dict(num_gpus=1, num_procs=1),
         end_str='<|im_end|>',
+        generation_kwargs = {"eos_token_id": [2, 92542]},
     )
 ]

View File

@@ -31,5 +31,6 @@ models = [
         meta_template=_meta_template,
         run_cfg=dict(num_gpus=1, num_procs=1),
         end_str='<|im_end|>',
+        generation_kwargs = {"eos_token_id": [2, 92542]},
     )
 ]

View File

@@ -31,5 +31,6 @@ models = [
         meta_template=_meta_template,
         run_cfg=dict(num_gpus=2, num_procs=1),
         end_str='<|im_end|>',
+        generation_kwargs = {"eos_token_id": [2, 92542]},
     )
 ]

View File

@@ -31,5 +31,6 @@ models = [
         meta_template=_meta_template,
         run_cfg=dict(num_gpus=2, num_procs=1),
         end_str='<|im_end|>',
+        generation_kwargs = {"eos_token_id": [2, 92542]},
     )
 ]

View File

@@ -3,8 +3,8 @@ from opencompass.models import HuggingFaceCausalLM
 _meta_template = dict(
     round=[
-        dict(role='HUMAN', begin='<|im_start|>user\n', end='<|im_end|>\n'),
         dict(role='SYSTEM', begin='<|im_start|>system\n', end='<|im_end|>\n'),
+        dict(role='HUMAN', begin='<|im_start|>user\n', end='<|im_end|>\n'),
         dict(role='BOT', begin='<|im_start|>assistant\n', end='<|im_end|>\n', generate=True),
     ],
     eos_token_id=92542
@@ -32,5 +32,6 @@ models = [
         meta_template=_meta_template,
         run_cfg=dict(num_gpus=2, num_procs=1),
         end_str='<|im_end|>',
+        generation_kwargs = {"eos_token_id": [2, 92542]},
     )
 ]

View File

@@ -31,5 +31,6 @@ models = [
         meta_template=_meta_template,
         run_cfg=dict(num_gpus=1, num_procs=1),
         end_str='<|im_end|>',
+        generation_kwargs = {"eos_token_id": [2, 92542]},
     )
 ]

View File

@@ -31,5 +31,6 @@ models = [
         meta_template=_meta_template,
         run_cfg=dict(num_gpus=1, num_procs=1),
         end_str='<|im_end|>',
+        generation_kwargs = {"eos_token_id": [2, 92542]},
     )
 ]

View File

@@ -3,8 +3,8 @@ from opencompass.models import HuggingFaceCausalLM
 _meta_template = dict(
     round=[
-        dict(role='HUMAN', begin='<|im_start|>user\n', end='<|im_end|>\n'),
         dict(role='SYSTEM', begin='<|im_start|>system\n', end='<|im_end|>\n'),
+        dict(role='HUMAN', begin='<|im_start|>user\n', end='<|im_end|>\n'),
         dict(role='BOT', begin='<|im_start|>assistant\n', end='<|im_end|>\n', generate=True),
     ],
     eos_token_id=92542
@@ -32,5 +32,6 @@ models = [
         meta_template=_meta_template,
         run_cfg=dict(num_gpus=1, num_procs=1),
         end_str='<|im_end|>',
+        generation_kwargs = {"eos_token_id": [2, 92542]},
     )
 ]