fix error in configs (#750)

This commit is contained in:
bittersweet1999 2023-12-28 19:53:07 +08:00 committed by GitHub
parent 81098722d2
commit 8728287a55
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 27 additions and 27 deletions

View File

@ -28,7 +28,7 @@ for _name in subjective_all_sets:
]),
),
retriever=dict(type=ZeroRetriever),
inferencer=dict(type=GenInferencer max_out_len=2048),
inferencer=dict(type=GenInferencer, max_out_len=2048),
)
subjective_eval_cfg = dict(

View File

@ -16,8 +16,8 @@ models = [dict(
truncation_side='left',
trust_remote_code=True,
use_fast=False,),
max_out_len=512,
max_seq_len=2048,
max_out_len=1024,
max_seq_len=4096,
batch_size=8,
model_kwargs=dict(device_map='auto', trust_remote_code=True),
run_cfg=dict(num_gpus=1, num_procs=1),

View File

@ -3,15 +3,15 @@ from opencompass.models import HuggingFaceCausalLM
models = [dict(
type=HuggingFaceCausalLM,
abbr='autoj-13b-GPTQ-4bits',
path="GAIR/autoj-13b-GPTQ-4bits",
tokenizer_path='GAIR/autoj-13b-GPTQ-4bits',
abbr='autoj-13b',
path="GAIR/autoj-13b",
tokenizer_path='GAIR/autoj-13b',
tokenizer_kwargs=dict(padding_side='left',
truncation_side='left',
trust_remote_code=True,
use_fast=False,),
max_out_len=512,
max_seq_len=2048,
max_out_len=1024,
max_seq_len=4096,
batch_size=8,
model_kwargs=dict(device_map='auto', trust_remote_code=True),
run_cfg=dict(num_gpus=1, num_procs=1),

View File

@ -8,16 +8,16 @@ https://huggingface.co/GAIR/autoj-13b-GPTQ-4bits
models = [dict(
type=HuggingFaceCausalLM,
abbr='autoj-13b',
path="GAIR/autoj-13b",
tokenizer_path='GAIR/autoj-13b',
abbr='autoj-13b-GPTQ-4bits',
path="GAIR/autoj-13b-GPTQ-4bits",
tokenizer_path='GAIR/autoj-13b-GPTQ-4bits',
tokenizer_kwargs=dict(padding_side='left',
truncation_side='left',
trust_remote_code=True,
use_fast=False,),
max_out_len=512,
max_seq_len=2048,
max_out_len=1024,
max_seq_len=4096,
batch_size=8,
model_kwargs=dict(device_map='auto', trust_remote_code=True),
run_cfg=dict(num_gpus=1, num_procs=1),
)]
)]

View File

@ -10,8 +10,8 @@ models = [dict(
truncation_side='left',
trust_remote_code=True,
use_fast=False,),
max_out_len=512,
max_seq_len=2048,
max_out_len=1024,
max_seq_len=4096,
batch_size=8,
model_kwargs=dict(device_map='auto', trust_remote_code=True),
run_cfg=dict(num_gpus=1, num_procs=1),

View File

@ -10,8 +10,8 @@ models = [dict(
truncation_side='left',
trust_remote_code=True,
use_fast=False,),
max_out_len=512,
max_seq_len=2048,
max_out_len=1024,
max_seq_len=4096,
batch_size=8,
model_kwargs=dict(device_map='auto', trust_remote_code=True),
run_cfg=dict(num_gpus=1, num_procs=1),

View File

@ -10,9 +10,9 @@ models = [dict(
truncation_side='left',
trust_remote_code=True,
use_fast=False,),
max_out_len=512,
max_seq_len=2048,
max_out_len=1024,
max_seq_len=4096,
batch_size=8,
model_kwargs=dict(device_map='auto', trust_remote_code=True),
run_cfg=dict(num_gpus=1, num_procs=1),
run_cfg=dict(num_gpus=4, num_procs=1),
)]

View File

@ -10,8 +10,8 @@ models = [dict(
truncation_side='left',
trust_remote_code=True,
use_fast=False,),
max_out_len=512,
max_seq_len=2048,
max_out_len=1024,
max_seq_len=4096,
batch_size=8,
model_kwargs=dict(device_map='auto', trust_remote_code=True),
run_cfg=dict(num_gpus=1, num_procs=1),

View File

@ -10,8 +10,8 @@ models = [dict(
truncation_side='left',
trust_remote_code=True,
use_fast=False,),
max_out_len=512,
max_seq_len=2048,
max_out_len=1024,
max_seq_len=4096,
batch_size=8,
model_kwargs=dict(device_map='auto', trust_remote_code=True),
run_cfg=dict(num_gpus=1, num_procs=1),

View File

@ -10,8 +10,8 @@ models = [dict(
truncation_side='left',
trust_remote_code=True,
use_fast=False,),
max_out_len=512,
max_seq_len=2048,
max_out_len=1024,
max_seq_len=4096,
batch_size=8,
model_kwargs=dict(device_map='auto', trust_remote_code=True),
run_cfg=dict(num_gpus=1, num_procs=1),