OpenCompass/configs/models/hf_internlm/lmdeploy_internlm_chat_7b.py

from opencompass.models import TurboMindModelwithChatTemplate

models = [
    dict(
        type=TurboMindModelwithChatTemplate,
        abbr='internlm-chat-7b-turbomind',
        path='internlm/internlm-chat-7b',
        # LMDeploy TurboMind engine settings: context window, batch size, tensor parallelism.
        engine_config=dict(session_len=4096, max_batch_size=16, tp=1),
        # Near-zero temperature with top_k=1 makes decoding effectively greedy/deterministic.
        gen_config=dict(top_k=1, temperature=1e-6, top_p=0.9, max_new_tokens=2048),
        max_seq_len=4096,
        max_out_len=2048,
        batch_size=16,
        run_cfg=dict(num_gpus=1),
    )
]
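
For context, here is a minimal sketch of how this `models` list might be pulled into an evaluation config through OpenCompass's `read_base()` mechanism. The eval-config filename and the GSM8K dataset import are illustrative assumptions, not part of this file:

# Hypothetical eval config, e.g. configs/eval_internlm_chat_7b_turbomind.py
from mmengine.config import read_base

with read_base():
    # Reuse the `models` list defined in lmdeploy_internlm_chat_7b.py.
    from .models.hf_internlm.lmdeploy_internlm_chat_7b import models
    # Dataset chosen purely for illustration; any dataset config could be substituted.
    from .datasets.gsm8k.gsm8k_gen import gsm8k_datasets

datasets = gsm8k_datasets

Such a config would then typically be launched from the repository root with `python run.py <path to eval config>`.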