OpenCompass/configs/models/qwen/vllm_qwen1_5_72b_chat.py


from opencompass.models import VLLM

_meta_template = dict(
    round=[
        dict(role='HUMAN', begin='<|im_start|>user\n', end='<|im_end|>\n'),
        dict(role='BOT', begin='<|im_start|>assistant\n', end='<|im_end|>\n', generate=True),
    ],
)

models = [
    dict(
        type=VLLM,
        abbr='qwen1.5-72b-chat-vllm',
        path='Qwen/Qwen1.5-72B-Chat',
        model_kwargs=dict(tensor_parallel_size=4),
        meta_template=_meta_template,
        max_out_len=100,
        max_seq_len=2048,
        batch_size=32,
        generation_kwargs=dict(temperature=0),
        stop_words=['<|im_end|>'],
        run_cfg=dict(num_gpus=4, num_procs=1),
    )
]
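
A model config like this is normally pulled into a top-level evaluation config through mmengine's read_base mechanism rather than run directly. The sketch below is a minimal example of that pattern; the entry-file name and the gsm8k_gen dataset import are assumptions for illustration, so adjust the relative import paths to your local OpenCompass configs/ layout.

# eval_qwen1_5_72b_chat_vllm.py -- hypothetical entry config; the import paths
# below are assumptions and must match your checkout's configs/ directory.
from mmengine.config import read_base

with read_base():
    # Reuse the vLLM model list defined in this file.
    from .models.qwen.vllm_qwen1_5_72b_chat import models
    # Any dataset config works here; gsm8k_gen is used purely as an example.
    from .datasets.gsm8k.gsm8k_gen import gsm8k_datasets

datasets = [*gsm8k_datasets]

The evaluation would then be launched from the repository root with something like `python run.py configs/eval_qwen1_5_72b_chat_vllm.py`, with the 4-GPU tensor-parallel setting in run_cfg determining how many devices each vLLM instance occupies.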