Fix VLLM argument error (#1207)

This commit is contained in:
Xu Song 2024-05-29 10:14:08 +08:00 committed by GitHub
parent 2954913d9b
commit 808582d952
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
14 changed files with 14 additions and 14 deletions

View File

@@ -19,7 +19,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=32, batch_size=32,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='</s>', stop_words=['</s>'],
run_cfg=dict(num_gpus=1, num_procs=1), run_cfg=dict(num_gpus=1, num_procs=1),
) )
] ]

View File

@@ -19,7 +19,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=32, batch_size=32,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='</s>', stop_words=['</s>'],
run_cfg=dict(num_gpus=1, num_procs=1), run_cfg=dict(num_gpus=1, num_procs=1),
) )
] ]

View File

@@ -20,7 +20,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=32, batch_size=32,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='</s>', stop_words=['</s>'],
run_cfg=dict(num_gpus=2, num_procs=1), run_cfg=dict(num_gpus=2, num_procs=1),
) )
] ]

View File

@@ -21,6 +21,6 @@ models = [
max_seq_len=4096, max_seq_len=4096,
batch_size=32, batch_size=32,
run_cfg=dict(num_gpus=4, num_procs=1), run_cfg=dict(num_gpus=4, num_procs=1),
end_str='<|endoftext|>', stop_words=['<|endoftext|>'],
) )
] ]

View File

@@ -19,7 +19,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=32, batch_size=32,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='<|im_end|>', stop_words=['<|im_end|>'],
run_cfg=dict(num_gpus=2, num_procs=1), run_cfg=dict(num_gpus=2, num_procs=1),
) )
] ]

View File

@@ -19,7 +19,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=32, batch_size=32,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='<|im_end|>', stop_words=['<|im_end|>'],
run_cfg=dict(num_gpus=4, num_procs=1), run_cfg=dict(num_gpus=4, num_procs=1),
) )
] ]

View File

@@ -19,7 +19,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=32, batch_size=32,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='<|im_end|>', stop_words=['<|im_end|>'],
run_cfg=dict(num_gpus=4, num_procs=1), run_cfg=dict(num_gpus=4, num_procs=1),
) )
] ]

View File

@@ -19,7 +19,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=32, batch_size=32,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='<|im_end|>', stop_words=['<|im_end|>'],
run_cfg=dict(num_gpus=4, num_procs=1), run_cfg=dict(num_gpus=4, num_procs=1),
) )
] ]

View File

@@ -17,7 +17,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=32, batch_size=32,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='</s>', stop_words=['</s>'],
run_cfg=dict(num_gpus=2, num_procs=1), run_cfg=dict(num_gpus=2, num_procs=1),
) )
] ]

View File

@@ -17,7 +17,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=32, batch_size=32,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='</s>', stop_words=['</s>'],
run_cfg=dict(num_gpus=1, num_procs=1), run_cfg=dict(num_gpus=1, num_procs=1),
) )
] ]

View File

@@ -18,7 +18,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=1, batch_size=1,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='</s>', stop_words=['</s>'],
run_cfg=dict(num_gpus=1, num_procs=1), run_cfg=dict(num_gpus=1, num_procs=1),
) )
] ]

View File

@@ -19,7 +19,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=32, batch_size=32,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='</s>', stop_words=['</s>'],
run_cfg=dict(num_gpus=4, num_procs=1), run_cfg=dict(num_gpus=4, num_procs=1),
) )
] ]

View File

@@ -18,7 +18,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=32, batch_size=32,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='</s>', stop_words=['</s>'],
run_cfg=dict(num_gpus=1, num_procs=1), run_cfg=dict(num_gpus=1, num_procs=1),
) )
] ]

View File

@@ -17,7 +17,7 @@ models = [
max_seq_len=2048, max_seq_len=2048,
batch_size=32, batch_size=32,
generation_kwargs=dict(temperature=0), generation_kwargs=dict(temperature=0),
end_str='</s>', stop_words=['</s>'],
run_cfg=dict(num_gpus=1, num_procs=1), run_cfg=dict(num_gpus=1, num_procs=1),
) )
] ]