from opencompass.models import HuggingFaceCausalLM

models = [
    dict(
        type=HuggingFaceCausalLM,
        abbr='vicuna-33b-v1.3-hf',
        path='lmsys/vicuna-33b-v1.3',
        tokenizer_path='lmsys/vicuna-33b-v1.3',
        tokenizer_kwargs=dict(
            padding_side='left',
            truncation_side='left',
            use_fast=False,
        ),
        max_out_len=100,
        max_seq_len=2048,
        batch_size=8,
        model_kwargs=dict(device_map='auto'),
        # if False, run inference sample-by-sample in a for-loop without batch padding
        batch_padding=False,
        # wrap prompts with the FastChat conversation template for vicuna
        use_fastchat_template=True,
        run_cfg=dict(num_gpus=4, num_procs=1)
    )
]
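# Usage sketch (not part of this config): a top-level OpenCompass eval config
# normally pulls this `models` list in via mmengine's read_base(). The exact
# import paths below are assumptions and depend on where this file and the
# dataset configs actually live under configs/, e.g.:
#
#   from mmengine.config import read_base
#
#   with read_base():
#       from .models.vicuna.hf_vicuna_33b_v1_3 import models
#       from .datasets.mmlu.mmlu_gen import mmlu_datasets
#
#   datasets = [*mmlu_datasets]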