[Enhancement] Increase default task size (#360)

This commit is contained in:
Tong Gao 2023-09-05 10:38:13 +08:00 committed by GitHub
parent b8bf16e81c
commit 5d75c1bbb9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 2 additions and 2 deletions

View File

@@ -29,7 +29,7 @@ class SizePartitioner(BasePartitioner):
     def __init__(self,
                  out_dir: str,
-                 max_task_size: int = 2000,
+                 max_task_size: int = 40000,
                  gen_task_coef: int = 20,
                  dataset_size_path: str = '.cache/dataset_size.json'):
         super().__init__(out_dir)

2
run.py
View File

@@ -91,7 +91,7 @@ def parse_args():
         help='The maximum size of an infer task. Only '
         'effective when "infer" is missing from the config.',
         type=int,
-        default=2000),
+        default=40000),
     parser.add_argument(
         '--gen-task-coef',
         help='The dataset cost measurement coefficient for generation tasks, '