from opencompass.models import HuggingFaceCausalLM

models = [
    dict(
        type=HuggingFaceCausalLM,
        abbr='vicuna-7b-v1.5-16k-hf',
        path="lmsys/vicuna-7b-v1.5-16k",
        tokenizer_path='lmsys/vicuna-7b-v1.5-16k',
        tokenizer_kwargs=dict(
            padding_side='left',
            truncation_side='left',
            use_fast=False,
        ),
        max_out_len=100,
        max_seq_len=8192,
        batch_size=8,
        model_kwargs=dict(device_map='auto'),
        batch_padding=False,  # if False, run inference sample-by-sample in a for-loop without batch padding
        use_fastchat_template=True,
        run_cfg=dict(num_gpus=1, num_procs=1)
    )
]
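# Usage sketch (the file name and dataset config below are assumptions, used only
# for illustration): a model config like this is normally placed under OpenCompass's
# configs/models/ directory and selected through the CLI, e.g.
#   python run.py --models hf_vicuna_7b_v1_5_16k --datasets <dataset_config>
# or pulled into an evaluation config inside a `with read_base():` block
# (`from mmengine.config import read_base`).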